Ns descriptor related stuffs. 67/64167/2
author    laili <lai.li@zte.com.cn>
Mon, 3 Sep 2018 06:36:52 +0000 (14:36 +0800)
committer laili <lai.li@zte.com.cn>
Mon, 3 Sep 2018 07:29:57 +0000 (15:29 +0800)
Refactor biz and view of nsd and pnfd.

Change-Id: I8b4a69607e191562b472d8a20f6e5701a9974f26
Issue-ID: VFC-1037
Signed-off-by: laili <lai.li@zte.com.cn>
catalog/packages/biz/common.py
catalog/packages/biz/ns_descriptor.py
catalog/packages/biz/pnf_descriptor.py
catalog/packages/tests/test_ns_descriptor.py
catalog/packages/tests/test_pnf_descriptor.py
catalog/packages/views/common.py [new file with mode: 0644]
catalog/packages/views/ns_descriptor_views.py
catalog/packages/views/pnf_descriptor_views.py

index b2fcee5..deef071 100644 (file)
@@ -17,6 +17,8 @@ import os
 from catalog.pub.config.config import CATALOG_ROOT_PATH
 from catalog.pub.utils import fileutil
 
+CHUNK_SIZE = 1024 * 8
+
 
 def save(remote_file, descriptor_id):
     local_file_name = remote_file.name
@@ -25,6 +27,16 @@ def save(remote_file, descriptor_id):
     if not os.path.exists(local_file_dir):
         fileutil.make_dirs(local_file_dir)
     with open(local_file_name, 'wb') as local_file:
-        for chunk in remote_file.chunks(chunk_size=1024 * 8):
+        for chunk in remote_file.chunks(chunk_size=CHUNK_SIZE):
             local_file.write(chunk)
     return local_file_name
+
+
+def read(file_path, start, end):
+    with open(file_path, 'rb') as fp:
+        fp.seek(start)
+        pos = start
+        while pos + CHUNK_SIZE < end:
+            yield fp.read(CHUNK_SIZE)
+            pos = fp.tell()
+        yield fp.read(end - pos)
index 8a89099..21b5735 100644 (file)
@@ -18,13 +18,13 @@ import logging
 import os
 import uuid
 
+from catalog.packages.biz.common import read, save
+from catalog.packages.const import PKG_STATUS
 from catalog.pub.config.config import CATALOG_ROOT_PATH
 from catalog.pub.database.models import NSPackageModel, PnfPackageModel, VnfPackageModel
 from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
 from catalog.pub.utils import fileutil, toscaparser
 from catalog.pub.utils.values import ignore_case_get
-from catalog.packages.const import PKG_STATUS
-from catalog.packages.biz.common import save
 
 logger = logging.getLogger(__name__)
 
@@ -59,7 +59,7 @@ class NsDescriptor(object):
         ns_pkgs = NSPackageModel.objects.all()
         response_data = []
         for ns_pkg in ns_pkgs:
-            data = fill_resp_data(ns_pkg)
+            data = self.fill_resp_data(ns_pkg)
             response_data.append(data)
         return response_data
 
@@ -68,7 +68,7 @@ class NsDescriptor(object):
         if not ns_pkgs.exists():
             logger.error('NSD(%s) does not exist.' % nsd_info_id)
             raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
-        return fill_resp_data(ns_pkgs[0])
+        return self.fill_resp_data(ns_pkgs[0])
 
     def delete_single(self, nsd_info_id):
         logger.info('Start to delete NSD(%s)...' % nsd_info_id)
@@ -89,7 +89,7 @@ class NsDescriptor(object):
         fileutil.delete_dirs(ns_pkg_path)
         logger.info('NSD(%s) has been deleted.' % nsd_info_id)
 
-    def upload(self, remote_file, nsd_info_id):
+    def upload(self, nsd_info_id, remote_file):
         logger.info('Start to upload NSD(%s)...' % nsd_info_id)
         ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
         if not ns_pkgs.exists():
@@ -101,7 +101,7 @@ class NsDescriptor(object):
         logger.info('NSD(%s) content has been uploaded.' % nsd_info_id)
         return local_file_name
 
-    def download(self, nsd_info_id):
+    def download(self, nsd_info_id, file_range):
         logger.info('Start to download NSD(%s)...' % nsd_info_id)
         ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
         if not ns_pkgs.exists():
@@ -110,93 +110,94 @@ class NsDescriptor(object):
         if ns_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
             logger.error('NSD(%s) is not ONBOARDED.' % nsd_info_id)
             raise CatalogException('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+
         local_file_path = ns_pkgs[0].localFilePath
-        local_file_name = local_file_path.split('/')[-1]
-        local_file_name = local_file_name.split('\\')[-1]
+        start, end = 0, os.path.getsize(local_file_path)
+        if file_range:
+            [start, end] = file_range.split('-')
+            start, end = start.strip(), end.strip()
+            start, end = int(start), int(end)
         logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
-        return local_file_path, local_file_name, os.path.getsize(local_file_path)
-
-
-def parse_nsd_and_save(nsd_info_id, local_file_name):
-    logger.info('Start to process NSD(%s)...' % nsd_info_id)
-    ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
-    ns_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
-    nsd_json = toscaparser.parse_nsd(local_file_name)
-    nsd = json.JSONDecoder().decode(nsd_json)
-
-    nsd_id = nsd["metadata"]["id"]
-    if nsd_id and NSPackageModel.objects.filter(nsdId=nsd_id):
-        logger.info('NSD(%s) already exists.' % nsd_id)
-        raise CatalogException("NSD(%s) already exists." % nsd_id)
-
-    for vnf in nsd["vnfs"]:
-        vnfd_id = vnf["properties"]["id"]
-        pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
-        if not pkg:
-            logger.error("VNFD is not distributed.")
-            raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
-
-    ns_pkgs.update(
-        nsdId=nsd_id,
-        nsdName=nsd["metadata"].get("name", nsd_id),
-        nsdDesginer=nsd["metadata"].get("vendor", "undefined"),
-        nsdDescription=nsd["metadata"].get("description", ""),
-        nsdVersion=nsd["metadata"].get("version", "undefined"),
-        onboardingState=PKG_STATUS.ONBOARDED,
-        operationalState=PKG_STATUS.ENABLED,
-        usageState=PKG_STATUS.NOT_IN_USE,
-        nsPackageUri=local_file_name,
-        sdcCsarId=nsd_info_id,
-        localFilePath=local_file_name,
-        nsdModel=nsd_json
-    )
-    logger.info('NSD(%s) has been processed.' % nsd_info_id)
-
-
-def fill_resp_data(ns_pkg):
-    data = {
-        'id': ns_pkg.nsPackageId,
-        'nsdId': ns_pkg.nsdId,
-        'nsdName': ns_pkg.nsdName,
-        'nsdVersion': ns_pkg.nsdVersion,
-        'nsdDesigner': ns_pkg.nsdDesginer,
-        'nsdInvariantId': None,  # TODO
-        'vnfPkgIds': [],
-        'pnfdInfoIds': [],  # TODO
-        'nestedNsdInfoIds': [],  # TODO
-        'nsdOnboardingState': ns_pkg.onboardingState,
-        'onboardingFailureDetails': None,  # TODO
-        'nsdOperationalState': ns_pkg.operationalState,
-        'nsdUsageState': ns_pkg.usageState,
-        'userDefinedData': {},
-        '_links': None  # TODO
-    }
-
-    if ns_pkg.nsdModel:
-        nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
-        vnf_pkg_ids = []
-        for vnf in nsd_model['vnfs']:
-            vnfd_id = vnf["properties"]["id"]
-            pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
-            for pkg in pkgs:
-                vnf_pkg_ids.append(pkg.vnfPackageId)
-        data['vnfPkgIds'] = vnf_pkg_ids
+        return read(local_file_path, start, end)
 
-        pnf_info_ids = []
-        for pnf in nsd_model['pnfs']:
-            pnfd_id = pnf["properties"]["id"]
-            pkgs = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
-            for pkg in pkgs:
-                pnf_info_ids.append(pkg.pnfPackageId)
-        data['pnfInfoIds'] = pnf_info_ids  # TODO: need reconfirming
+    def parse_nsd_and_save(self, nsd_info_id, local_file_name):
+        logger.info('Start to process NSD(%s)...' % nsd_info_id)
+        ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        ns_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+        nsd_json = toscaparser.parse_nsd(local_file_name)
+        nsd = json.JSONDecoder().decode(nsd_json)
 
-    if ns_pkg.userDefinedData:
-        user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
-        data['userDefinedData'] = user_defined_data
+        nsd_id = nsd["metadata"]["id"]
+        if nsd_id and NSPackageModel.objects.filter(nsdId=nsd_id):
+            logger.info('NSD(%s) already exists.' % nsd_id)
+            raise CatalogException("NSD(%s) already exists." % nsd_id)
 
-    return data
+        for vnf in nsd["vnfs"]:
+            vnfd_id = vnf["properties"]["id"]
+            pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+            if not pkg:
+                logger.error("VNFD is not distributed.")
+                raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
+
+        ns_pkgs.update(
+            nsdId=nsd_id,
+            nsdName=nsd["metadata"].get("name", nsd_id),
+            nsdDesginer=nsd["metadata"].get("vendor", "undefined"),
+            nsdDescription=nsd["metadata"].get("description", ""),
+            nsdVersion=nsd["metadata"].get("version", "undefined"),
+            onboardingState=PKG_STATUS.ONBOARDED,
+            operationalState=PKG_STATUS.ENABLED,
+            usageState=PKG_STATUS.NOT_IN_USE,
+            nsPackageUri=local_file_name,
+            sdcCsarId=nsd_info_id,
+            localFilePath=local_file_name,
+            nsdModel=nsd_json
+        )
+        logger.info('NSD(%s) has been processed.' % nsd_info_id)
 
+    def fill_resp_data(self, ns_pkg):
+        data = {
+            'id': ns_pkg.nsPackageId,
+            'nsdId': ns_pkg.nsdId,
+            'nsdName': ns_pkg.nsdName,
+            'nsdVersion': ns_pkg.nsdVersion,
+            'nsdDesigner': ns_pkg.nsdDesginer,
+            'nsdInvariantId': None,  # TODO
+            'vnfPkgIds': [],
+            'pnfdInfoIds': [],  # TODO
+            'nestedNsdInfoIds': [],  # TODO
+            'nsdOnboardingState': ns_pkg.onboardingState,
+            'onboardingFailureDetails': None,  # TODO
+            'nsdOperationalState': ns_pkg.operationalState,
+            'nsdUsageState': ns_pkg.usageState,
+            'userDefinedData': {},
+            '_links': None  # TODO
+        }
+
+        if ns_pkg.nsdModel:
+            nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+            vnf_pkg_ids = []
+            for vnf in nsd_model['vnfs']:
+                vnfd_id = vnf["properties"]["id"]
+                pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+                for pkg in pkgs:
+                    vnf_pkg_ids.append(pkg.vnfPackageId)
+            data['vnfPkgIds'] = vnf_pkg_ids
+
+            pnf_info_ids = []
+            for pnf in nsd_model['pnfs']:
+                pnfd_id = pnf["properties"]["id"]
+                pkgs = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+                for pkg in pkgs:
+                    pnf_info_ids.append(pkg.pnfPackageId)
+            data['pnfInfoIds'] = pnf_info_ids  # TODO: need reconfirming
+
+        if ns_pkg.userDefinedData:
+            user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
+            data['userDefinedData'] = user_defined_data
+
+        return data
 
-def handle_upload_failed(nsd_info_id):
-    ns_pkg = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
-    ns_pkg.update(onboardingState=PKG_STATUS.CREATED)
+    def handle_upload_failed(self, nsd_info_id):
+        ns_pkg = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        ns_pkg.update(onboardingState=PKG_STATUS.CREATED)
index 8caf98b..ef93173 100644 (file)
@@ -29,7 +29,7 @@ from catalog.packages.biz.common import save
 logger = logging.getLogger(__name__)
 
 
-class PnfPackage(object):
+class PnfDescriptor(object):
 
     def __init__(self):
         pass
@@ -57,7 +57,7 @@ class PnfPackage(object):
         pnf_pkgs = PnfPackageModel.objects.all()
         response_data = []
         for pnf_pkg in pnf_pkgs:
-            data = fill_response_data(pnf_pkg)
+            data = self.fill_response_data(pnf_pkg)
             response_data.append(data)
         return response_data
 
@@ -66,7 +66,7 @@ class PnfPackage(object):
         if not pnf_pkgs.exists():
             logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
             raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
-        return fill_response_data(pnf_pkgs[0])
+        return self.fill_response_data(pnf_pkgs[0])
 
     def upload(self, remote_file, pnfd_info_id):
         logger.info('Start to upload PNFD(%s)...' % pnfd_info_id)
@@ -126,53 +126,50 @@ class PnfPackage(object):
         logger.info('PNFD(%s) has been downloaded.' % pnfd_info_id)
         return local_file_path, local_file_name, os.path.getsize(local_file_path)
 
+    def parse_pnfd_and_save(self, pnfd_info_id, local_file_name):
+        logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
+        pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+        pnf_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+        # NOTE(review): removed stray no-op "PnfPackageModel" expression statement
+        pnfd_json = toscaparser.parse_pnfd(local_file_name)
+        pnfd = json.JSONDecoder().decode(pnfd_json)
+
+        pnfd_id = pnfd["metadata"]["id"]
+        if pnfd_id and PnfPackageModel.objects.filter(pnfdId=pnfd_id):
+            logger.info('PNFD(%s) already exists.' % pnfd_id)
+            raise CatalogException("PNFD(%s) already exists." % pnfd_id)
+
+        pnf_pkgs.update(
+            pnfdId=pnfd_id,
+            pnfdVersion=pnfd["metadata"].get("version", "undefined"),
+            pnfPackageUri=local_file_name,
+            onboardingState=PKG_STATUS.ONBOARDED,
+            usageState=PKG_STATUS.NOT_IN_USE,
+            localFilePath=local_file_name,
+            pnfdModel=pnfd_json
+        )
+        logger.info('PNFD(%s) has been processed.' % pnfd_info_id)
+
+    def fill_response_data(self, pnf_pkg):
+        data = {
+            'id': pnf_pkg.pnfPackageId,
+            'pnfdId': pnf_pkg.pnfdId,
+            'pnfdName': pnf_pkg.pnfdProductName,  # TODO: check
+            'pnfdVersion': pnf_pkg.pnfdVersion,
+            'pnfdProvider': pnf_pkg.pnfVendor,  # TODO: check
+            'pnfdInvariantId': None,  # TODO
+            'pnfdOnboardingState': pnf_pkg.onboardingState,
+            'onboardingFailureDetails': None,  # TODO
+            'pnfdUsageState': pnf_pkg.usageState,
+            'userDefinedData': {},
+            '_links': None  # TODO
+        }
+        if pnf_pkg.userDefinedData:
+            user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
+            data['userDefinedData'] = user_defined_data
+
+        return data
 
-def parse_pnfd_and_save(pnfd_info_id, local_file_name):
-    logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
-    pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
-    pnf_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
-    PnfPackageModel
-    pnfd_json = toscaparser.parse_pnfd(local_file_name)
-    pnfd = json.JSONDecoder().decode(pnfd_json)
-
-    pnfd_id = pnfd["metadata"]["id"]
-    if pnfd_id and PnfPackageModel.objects.filter(pnfdId=pnfd_id):
-        logger.info('PNFD(%s) already exists.' % pnfd_id)
-        raise CatalogException("PNFD(%s) already exists." % pnfd_id)
-
-    pnf_pkgs.update(
-        pnfdId=pnfd_id,
-        pnfdVersion=pnfd["metadata"].get("version", "undefined"),
-        pnfPackageUri=local_file_name,
-        onboardingState=PKG_STATUS.ONBOARDED,
-        usageState=PKG_STATUS.NOT_IN_USE,
-        localFilePath=local_file_name,
-        pnfdModel=pnfd_json
-    )
-    logger.info('PNFD(%s) has been processed.' % pnfd_info_id)
-
-
-def fill_response_data(pnf_pkg):
-    data = {
-        'id': pnf_pkg.pnfPackageId,
-        'pnfdId': pnf_pkg.pnfdId,
-        'pnfdName': pnf_pkg.pnfdProductName,  # TODO: check
-        'pnfdVersion': pnf_pkg.pnfdVersion,
-        'pnfdProvider': pnf_pkg.pnfVendor,  # TODO: check
-        'pnfdInvariantId': None,  # TODO
-        'pnfdOnboardingState': pnf_pkg.onboardingState,
-        'onboardingFailureDetails': None,  # TODO
-        'pnfdUsageState': pnf_pkg.usageState,
-        'userDefinedData': {},
-        '_links': None  # TODO
-    }
-    if pnf_pkg.userDefinedData:
-        user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
-        data['userDefinedData'] = user_defined_data
-
-    return data
-
-
-def handle_upload_failed(pnf_pkg_id):
-    pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=pnf_pkg_id)
-    pnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
+    def handle_upload_failed(self, pnf_pkg_id):
+        pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=pnf_pkg_id)
+        pnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
index d934923..ec6357b 100644 (file)
 
 import copy
 import json
-import os
 import mock
+import os
 
 from django.test import TestCase
 from rest_framework import status
 from rest_framework.test import APIClient
-from catalog.pub.database.models import NSPackageModel, VnfPackageModel
-from catalog.pub.config.config import CATALOG_ROOT_PATH
-from catalog.pub.utils import toscaparser
+from catalog.packages.biz.ns_descriptor import NsDescriptor
 from catalog.packages.const import PKG_STATUS
 from catalog.packages.tests.const import nsd_data
-from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, VnfPackageModel
+from catalog.pub.utils import toscaparser
 
 
 class TestNsDescriptor(TestCase):
index fc60711..e67e35d 100644 (file)
@@ -22,8 +22,7 @@ import os
 from django.test import TestCase
 from rest_framework import status
 from rest_framework.test import APIClient
-
-from catalog.packages.biz.pnf_descriptor import PnfPackage
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
 from catalog.packages.const import PKG_STATUS
 from catalog.packages.tests.const import pnfd_data
 from catalog.pub.database.models import PnfPackageModel, NSPackageModel
@@ -236,38 +235,38 @@ class TestPnfDescriptor(TestCase):
         self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
         os.remove('pnfd_content.txt')
 
-    @mock.patch.object(PnfPackage, "create")
+    @mock.patch.object(PnfDescriptor, "create")
     def test_pnfd_create_when_catch_exception(self, mock_create):
         request_data = {'userDefinedData': self.user_defined_data}
         mock_create.side_effect = TypeError('integer type')
         response = self.client.post('/api/nsd/v1/pnf_descriptors', data=request_data, format='json')
         self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
 
-    @mock.patch.object(PnfPackage, "delete_single")
+    @mock.patch.object(PnfDescriptor, "delete_single")
     def test_delete_single_when_catch_exception(self, mock_delete_single):
         mock_delete_single.side_effect = TypeError("integer type")
         response = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
         self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
 
-    @mock.patch.object(PnfPackage, "query_single")
+    @mock.patch.object(PnfDescriptor, "query_single")
     def test_query_single_when_catch_exception(self, mock_query_single):
         mock_query_single.side_effect = TypeError("integer type")
         response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
         self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
 
-    @mock.patch.object(PnfPackage, "query_multiple")
+    @mock.patch.object(PnfDescriptor, "query_multiple")
     def test_query_multiple_when_catch_exception(self, mock_query_muitiple):
         mock_query_muitiple.side_effect = TypeError("integer type")
         response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
         self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
 
-    @mock.patch.object(PnfPackage, "upload")
+    @mock.patch.object(PnfDescriptor, "upload")
     def test_upload_when_catch_exception(self, mock_upload):
         mock_upload.side_effect = TypeError("integer type")
         response = self.client.put("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
         self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
 
-    @mock.patch.object(PnfPackage, "download")
+    @mock.patch.object(PnfDescriptor, "download")
     def test_download_when_catch_exception(self, mock_download):
         mock_download.side_effect = TypeError("integer type")
         response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
diff --git a/catalog/packages/views/common.py b/catalog/packages/views/common.py
new file mode 100644 (file)
index 0000000..5dacb77
--- /dev/null
@@ -0,0 +1,27 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from catalog.pub.exceptions import CatalogException
+
+logger = logging.getLogger(__name__)
+
+
+def validate_data(data, serializer):
+    serialized_data = serializer(data=data)
+    if not serialized_data.is_valid():
+        logger.error('Data validation failed.')
+        raise CatalogException(serialized_data.errors)
+    return serialized_data
index a82b55b..8399243 100644 (file)
@@ -16,16 +16,18 @@ import logging
 import traceback
 
 from django.http import StreamingHttpResponse
-from catalog.packages.biz.ns_descriptor import NsDescriptor, parse_nsd_and_save, handle_upload_failed
-from catalog.packages.serializers.create_nsd_info_request import CreateNsdInfoRequestSerializer
-from catalog.packages.serializers.nsd_info import NsdInfoSerializer
-from catalog.packages.serializers.nsd_infos import NsdInfosSerializer
-from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
 from drf_yasg.utils import no_body, swagger_auto_schema
 from rest_framework import status
 from rest_framework.decorators import api_view
 from rest_framework.response import Response
 
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.packages.serializers.create_nsd_info_request import CreateNsdInfoRequestSerializer
+from catalog.packages.serializers.nsd_info import NsdInfoSerializer
+from catalog.packages.serializers.nsd_infos import NsdInfosSerializer
+from catalog.packages.views.common import validate_data
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+
 logger = logging.getLogger(__name__)
 
 
@@ -35,7 +37,7 @@ logger = logging.getLogger(__name__)
     request_body=no_body,
     responses={
         status.HTTP_200_OK: NsdInfoSerializer(),
-        status.HTTP_404_NOT_FOUND: 'NSDs do not exist.',
+        status.HTTP_404_NOT_FOUND: 'NSDs do not exist',
         status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
     }
 )
@@ -49,32 +51,32 @@ logger = logging.getLogger(__name__)
     }
 )
 @api_view(http_method_names=['GET', 'DELETE'])
-def ns_info_rd(request, nsdInfoId):   # TODO
+def ns_info_rd(request, **kwargs):
+    nsd_info_id = kwargs.get("nsdInfoId")
     if request.method == 'GET':
         try:
-            data = NsDescriptor().query_single(nsdInfoId)
+            data = NsDescriptor().query_single(nsd_info_id)
             nsd_info = validate_data(data, NsdInfoSerializer)
             return Response(data=nsd_info.data, status=status.HTTP_200_OK)
         except ResourceNotFoundException as e:
             logger.error(e.message)
-            return Response(data={'error': 'NSDs do not exist.'}, status=status.HTTP_404_NOT_FOUND)
+            error_data = {'error': e.message}
+            error_code = status.HTTP_404_NOT_FOUND
         except Exception as e:
             logger.error(e.message)
             logger.error(traceback.format_exc())
-            error_msg = {'error': 'Query of a NSD failed.'}
-        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_data = {'error': 'Query of NSD(%s) failed.' % nsd_info_id}
+            error_code = status.HTTP_500_INTERNAL_SERVER_ERROR
+        return Response(data=error_data, status=error_code)
 
     if request.method == 'DELETE':
         try:
-            NsDescriptor().delete_single(nsdInfoId)
+            NsDescriptor().delete_single(nsd_info_id)
             return Response(status=status.HTTP_204_NO_CONTENT)
-        except CatalogException as e:
-            logger.error(e.message)
-            error_msg = {'error': 'Deletion of a NSD failed.'}
         except Exception as e:
             logger.error(e.message)
             logger.error(traceback.format_exc())
-            error_msg = {'error': 'Deletion of a NSD failed.'}
+            error_msg = {'error': 'Deletion of NSD(%s) failed.' % nsd_info_id}
         return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
@@ -97,16 +99,13 @@ def ns_info_rd(request, nsdInfoId):   # TODO
     }
 )
 @api_view(http_method_names=['POST', 'GET'])
-def ns_descriptors_rc(request, *args, **kwargs):
+def ns_descriptors_rc(request):
     if request.method == 'POST':
         try:
             create_nsd_info_requst = validate_data(request.data, CreateNsdInfoRequestSerializer)
             data = NsDescriptor().create(create_nsd_info_requst.data)
             nsd_info = validate_data(data, NsdInfoSerializer)
             return Response(data=nsd_info.data, status=status.HTTP_201_CREATED)
-        except CatalogException as e:
-            logger.error(e.message)
-            error_msg = {'error': 'Creating a NSD failed.'}
         except Exception as e:
             logger.error(e.message)
             logger.error(traceback.format_exc())
@@ -145,69 +144,42 @@ def ns_descriptors_rc(request, *args, **kwargs):
     }
 )
 @api_view(http_method_names=['PUT', 'GET'])
-def nsd_content_ru(request, *args, **kwargs):
+def nsd_content_ru(request, **kwargs):
     nsd_info_id = kwargs.get("nsdInfoId")
     if request.method == 'PUT':
         files = request.FILES.getlist('file')
         try:
-            local_file_name = NsDescriptor().upload(files[0], nsd_info_id)
-            parse_nsd_and_save(nsd_info_id, local_file_name)
+            local_file_name = NsDescriptor().upload(nsd_info_id, files[0])
+            NsDescriptor().parse_nsd_and_save(nsd_info_id, local_file_name)
             return Response(data=None, status=status.HTTP_204_NO_CONTENT)
         except CatalogException as e:
-            handle_upload_failed(nsd_info_id)
+            NsDescriptor().handle_upload_failed(nsd_info_id)
             logger.error(e.message)
-            error_msg = {'error': 'Uploading NSD content failed.'}
+            error_data = {'error': e.message}
         except Exception as e:
-            handle_upload_failed(nsd_info_id)
+            NsDescriptor().handle_upload_failed(nsd_info_id)
             logger.error(e.message)
             logger.error(traceback.format_exc())
-            error_msg = {'error': 'Uploading NSD content failed.'}
-        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_data = {'error': 'Uploading NSD(%s) failed.' % nsd_info_id}
+        return Response(data=error_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'GET':
         try:
-            file_path, file_name, file_size = NsDescriptor().download(nsd_info_id)
-            start, end = 0, file_size
             file_range = request.META.get('RANGE')
-            if file_range:
-                [start, end] = file_range.split('-')
-                start, end = start.strip(), end.strip()
-                start, end = int(start), int(end)
-            response = StreamingHttpResponse(
-                read_partial_file(file_path, start, end),
-                status=status.HTTP_200_OK
-            )
-            response['Content-Range'] = '%s-%s' % (start, end)
-            response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
-            response['Content-Length'] = end - start
+            file_iterator = NsDescriptor().download(nsd_info_id, file_range)
+            response = StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
             return response
         except ResourceNotFoundException as e:
             logger.error(e.message)
-            return Response(data={'error': 'NSD does not exist.'}, status=status.HTTP_404_NOT_FOUND)
+            error_data = {'error': e.message}
+            error_code = status.HTTP_404_NOT_FOUND
         except CatalogException as e:
             logger.error(e.message)
-            error_msg = {'error': 'Downloading NSD content failed.'}
+            error_data = {'error': e.message}
+            error_code = status.HTTP_500_INTERNAL_SERVER_ERROR
         except Exception as e:
             logger.error(e.message)
             logger.error(traceback.format_exc())
-            error_msg = {'error': 'Downloading NSD content failed.'}
-        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
-
-
-def read_partial_file(file_path, start, end):
-    fp = open(file_path, 'rb')
-    fp.seek(start)
-    pos = start
-    CHUNK_SIZE = 1024 * 8
-    while pos + CHUNK_SIZE < end:
-        yield fp.read(CHUNK_SIZE)
-        pos = fp.tell()
-    yield fp.read(end - pos)
-
-
-def validate_data(data, serializer):
-    serialized_data = serializer(data=data)
-    if not serialized_data.is_valid():
-        logger.error('Data validation failed.')
-        raise CatalogException(serialized_data.errors)
-    return serialized_data
+            error_data = {'error': 'Downloading NSD(%s) failed.' % nsd_info_id}
+            error_code = status.HTTP_500_INTERNAL_SERVER_ERROR
+        return Response(data=error_data, status=error_code)
index f5430e6..09b242a 100644 (file)
@@ -16,17 +16,16 @@ import logging
 import traceback
 
 from django.http import FileResponse
-
-from catalog.packages.biz.pnf_descriptor import PnfPackage, parse_pnfd_and_save, handle_upload_failed
-from catalog.packages.serializers.create_pnfd_info_request import CreatePnfdInfoRequestSerializer
-from catalog.packages.serializers.pnfd_info import PnfdInfoSerializer
-from catalog.packages.serializers.pnfd_infos import PnfdInfosSerializer
-from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
 from drf_yasg.utils import no_body, swagger_auto_schema
 from rest_framework import status
 from rest_framework.decorators import api_view
 from rest_framework.response import Response
-from catalog.packages.views.ns_descriptor_views import validate_data
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.serializers.create_pnfd_info_request import CreatePnfdInfoRequestSerializer
+from catalog.packages.serializers.pnfd_info import PnfdInfoSerializer
+from catalog.packages.serializers.pnfd_infos import PnfdInfosSerializer
+from catalog.packages.views.common import validate_data
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
 
 logger = logging.getLogger(__name__)
 
@@ -55,7 +54,7 @@ def pnfd_info_rd(request, pnfdInfoId):  # TODO
     if request.method == 'GET':
         logger.debug("Query an individual PNF descriptor> %s" % request.data)
         try:
-            data = PnfPackage().query_single(pnfdInfoId)
+            data = PnfDescriptor().query_single(pnfdInfoId)
             pnfd_info = validate_data(data, PnfdInfoSerializer)
             return Response(data=pnfd_info.data, status=status.HTTP_200_OK)
         except ResourceNotFoundException as e:
@@ -70,7 +69,7 @@ def pnfd_info_rd(request, pnfdInfoId):  # TODO
     if request.method == 'DELETE':
         logger.debug("Delete an individual PNFD resource> %s" % request.data)
         try:
-            PnfPackage().delete_single(pnfdInfoId)
+            PnfDescriptor().delete_single(pnfdInfoId)
             return Response(data=None, status=status.HTTP_204_NO_CONTENT)
         except CatalogException as e:
             logger.error(e.message)
@@ -105,7 +104,7 @@ def pnf_descriptors_rc(request, *args, **kwargs):
     if request.method == 'POST':
         try:
             create_pnfd_info_request = validate_data(request.data, CreatePnfdInfoRequestSerializer)
-            data = PnfPackage().create(create_pnfd_info_request.data)
+            data = PnfDescriptor().create(create_pnfd_info_request.data)
             pnfd_info = validate_data(data, PnfdInfoSerializer)
             return Response(data=pnfd_info.data, status=status.HTTP_201_CREATED)
         except CatalogException as e:
@@ -119,7 +118,7 @@ def pnf_descriptors_rc(request, *args, **kwargs):
 
     if request.method == 'GET':
         try:
-            data = PnfPackage().query_multiple()
+            data = PnfDescriptor().query_multiple()
             pnfd_infos = validate_data(data, PnfdInfosSerializer)
             return Response(data=pnfd_infos.data, status=status.HTTP_200_OK)
         except Exception as e:
@@ -154,15 +153,15 @@ def pnfd_content_ru(request, *args, **kwargs):
     if request.method == 'PUT':
         files = request.FILES.getlist('file')
         try:
-            local_file_name = PnfPackage().upload(files[0], pnfd_info_id)
-            parse_pnfd_and_save(pnfd_info_id, local_file_name)
+            local_file_name = PnfDescriptor().upload(files[0], pnfd_info_id)
+            PnfDescriptor().parse_pnfd_and_save(pnfd_info_id, local_file_name)
             return Response(data=None, status=status.HTTP_204_NO_CONTENT)
         except CatalogException as e:
-            handle_upload_failed(pnfd_info_id)
+            PnfDescriptor().handle_upload_failed(pnfd_info_id)
             logger.error(e.message)
             error_msg = {'error': 'Uploading PNFD content failed.'}
         except Exception as e:
-            handle_upload_failed(pnfd_info_id)
+            PnfDescriptor().handle_upload_failed(pnfd_info_id)
             logger.error(e.message)
             logger.error(traceback.format_exc())
             error_msg = {'error': 'Uploading PNFD content failed.'}
@@ -170,7 +169,7 @@ def pnfd_content_ru(request, *args, **kwargs):
 
     if request.method == 'GET':
         try:
-            file_path, file_name, file_size = PnfPackage().download(pnfd_info_id)
+            file_path, file_name, file_size = PnfDescriptor().download(pnfd_info_id)
             response = FileResponse(open(file_path, 'rb'), status=status.HTTP_200_OK)
             response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
             response['Content-Length'] = file_size