From: laili
Date: Mon, 27 Aug 2018 12:49:09 +0000 (+0800)
Subject: NS descriptor related changes.
X-Git-Tag: 1.2.0~45
X-Git-Url: https://gerrit.onap.org/r/gitweb?a=commitdiff_plain;h=refs%2Fchanges%2F71%2F63071%2F3;p=vfc%2Fnfvo%2Fcatalog.git

NS descriptor related changes.

Refactor exception handling.
Modify tests.

Change-Id: I99b7ff30cbf3a2bc8383e4dea96ee8cd59885b24
Issue-ID: VFC-1037
Signed-off-by: laili
---

diff --git a/catalog/packages/biz/ns_descriptor.py b/catalog/packages/biz/ns_descriptor.py
index 4846bfab..c63304bf 100644
--- a/catalog/packages/biz/ns_descriptor.py
+++ b/catalog/packages/biz/ns_descriptor.py
@@ -12,22 +12,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 import json
 import logging
 import os
 import uuid
 
 from catalog.pub.config.config import CATALOG_ROOT_PATH
-from catalog.pub.utils import fileutil
-from catalog.pub.utils.values import ignore_case_get
 from catalog.pub.database.models import NSPackageModel, PnfPackageModel, VnfPackageModel
 from catalog.pub.exceptions import CatalogException
-from catalog.pub.utils import toscaparser
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.pub.utils.values import ignore_case_get
 
 logger = logging.getLogger(__name__)
 
 
 def create(data):
+    logger.info('Start to create a NSD...')
     user_defined_data = ignore_case_get(data, 'userDefinedData')
     data = {
         'id': str(uuid.uuid4()),
@@ -44,13 +45,15 @@ def create(data):
         usageState=data['nsdUsageState'],
         userDefinedData=data['userDefinedData']
     ).save()
+    logger.info('A NSD(%s) has been created.' % data['id'])
     return data
 
 
 def query_multiple():
     ns_pkgs = NSPackageModel.objects.all()
     if not ns_pkgs.exists():
-        raise CatalogException('NS descriptors do not exist.')
+        logger.error('NSDs do not exist.')
+        raise CatalogException('NSDs do not exist.')
     response_data = []
     for ns_pkg in ns_pkgs:
         data = fill_resp_data(ns_pkg)
@@ -61,41 +64,70 @@ def query_multiple():
 
 
 def query_single(nsd_info_id):
     ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
     if not ns_pkgs.exists():
-        raise CatalogException('NS descriptor(%s) does not exist.' % nsd_info_id)
+        logger.error('NSD(%s) does not exist.' % nsd_info_id)
+        raise CatalogException('NSD(%s) does not exist.' % nsd_info_id)
     return fill_resp_data(ns_pkgs[0])
 
 
 def delete_single(nsd_info_id):
+    logger.info('Start to delete NSD(%s)...' % nsd_info_id)
     ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
     if not ns_pkgs.exists():
-        logger.debug('NS descriptor (%s) is deleted.' % nsd_info_id)
+        logger.info('NSD(%s) is deleted.' % nsd_info_id)
         return
+
     if ns_pkgs[0].onboardingState == 'ONBOARDED':
-        raise CatalogException('The NS descriptor (%s) shall be non-ONBOARDED.' % nsd_info_id)
+        logger.error('NSD(%s) shall be non-ONBOARDED.' % nsd_info_id)
+        raise CatalogException('NSD(%s) shall be non-ONBOARDED.' % nsd_info_id)
     if ns_pkgs[0].operationalState != 'DISABLED':
-        raise CatalogException('The NS descriptor (%s) shall be DISABLED.' % nsd_info_id)
+        logger.error('NSD(%s) shall be DISABLED.' % nsd_info_id)
+        raise CatalogException('NSD(%s) shall be DISABLED.' % nsd_info_id)
     if ns_pkgs[0].usageState != 'NOT_IN_USE':
-        raise CatalogException('The NS descriptor (%s) shall be NOT_IN_USE.' % nsd_info_id)
+        logger.error('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+        raise CatalogException('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+
     ns_pkgs.delete()
     ns_pkg_path = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
     fileutil.delete_dirs(ns_pkg_path)
-    logger.debug('NS descriptor (%s) is deleted.' % nsd_info_id)
+    logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+
+
+def upload(remote_file, nsd_info_id):
+    logger.info('Start to upload NSD(%s)...' % nsd_info_id)
+    ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+    if not ns_pkgs.exists():
+        logger.info('NSD(%s) does not exist.' % nsd_info_id)
+        raise CatalogException('NSD(%s) does not exist.' % nsd_info_id)
+
+    ns_pkgs[0].onboardingState = 'UPLOADING'  # TODO: if failed, should be set to CREATED
+    local_file_name = remote_file.name
+    local_file_dir = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
+    local_file_name = os.path.join(local_file_dir, local_file_name)
+    if not os.path.exists(local_file_dir):
+        fileutil.make_dirs(local_file_dir)
+    with open(local_file_name, 'wb') as local_file:
+        for chunk in remote_file.chunks(chunk_size=1024 * 8):
+            local_file.write(chunk)
+    logger.info('NSD(%s) content has been uploaded.' % nsd_info_id)
 
 
 def process(nsd_info_id, local_file_name):
+    logger.info('Start to process NSD(%s)...' % nsd_info_id)
+    ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+    ns_pkgs[0].onboardingState = 'PROCESSING'  # TODO: if failed, should be set to CREATED
     nsd_json = toscaparser.parse_nsd(local_file_name)
     nsd = json.JSONDecoder().decode(nsd_json)
     nsd_id = nsd["metadata"]["id"]
-    if nsd_id and NSPackageModel.objects.filter(nsdId=nsd_id):  # nsd_id may not exist
-        raise CatalogException("NS Descriptor (%s) already exists." % nsd_id)
+    if nsd_id and NSPackageModel.objects.filter(nsdId=nsd_id):
+        logger.info('NSD(%s) already exists.' % nsd_id)
+        raise CatalogException("NSD(%s) already exists." % nsd_id)
     for vnf in nsd["vnfs"]:
         vnfd_id = vnf["properties"]["id"]
         pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
         if not pkg:
-            vnfd_name = vnf.get("vnf_id", "undefined")
-            logger.error("[%s] is not distributed.", vnfd_name)
+            logger.error("VNFD is not distributed.")
             raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
 
     NSPackageModel(
@@ -110,34 +142,18 @@ def process(nsd_info_id, local_file_name):
         localFilePath=local_file_name,
         nsdModel=nsd_json
     ).save()
-
-
-def upload(remote_file, nsd_info_id):
-    ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
-    if not ns_pkgs.exists():
-        raise CatalogException('The NS descriptor (%s) does not exist.' % nsd_info_id)
-
-    local_file_name = remote_file.name
-    local_file_dir = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
-    local_file_name = os.path.join(local_file_dir, local_file_name)
-    if not os.path.exists(local_file_dir):
-        fileutil.make_dirs(local_file_dir)
-    with open(local_file_name, 'wb') as local_file:
-        if remote_file.multiple_chunks(chunk_size=None):
-            for chunk in remote_file.chunks():
-                local_file.write(chunk)
-        else:
-            data = remote_file.read()
-            local_file.write(data)
+    logger.info('NSD(%s) has been processed.' % nsd_info_id)
 
 
 def download(nsd_info_id):
+    logger.info('Start to download NSD(%s)...' % nsd_info_id)
     ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
     if not ns_pkgs.exists():
         raise CatalogException('The NS Descriptor (%s) does not exist.' % nsd_info_id)
     if ns_pkgs[0].onboardingState != 'ONBOARDED':
         raise CatalogException('The NS Descriptor (%s) is not ONBOARDED.' % nsd_info_id)
     local_file_path = ns_pkgs[0].localFilePath
+    logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
     return local_file_path
diff --git a/catalog/packages/tests/test_ns_descriptor.py b/catalog/packages/tests/test_ns_descriptor.py
index 285a1cd4..02b91a72 100644
--- a/catalog/packages/tests/test_ns_descriptor.py
+++ b/catalog/packages/tests/test_ns_descriptor.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+import copy
 import json
 import os
 
@@ -31,14 +31,29 @@ class TestNsDescriptor(TestCase):
             'key2': 'value2',
             'key3': 'value3',
         }
+        self.expected_nsd_info = {
+            'id': None,
+            'nsdId': None,
+            'nsdName': None,
+            'nsdVersion': None,
+            'nsdDesigner': None,
+            'nsdInvariantId': None,
+            'vnfPkgIds': [],
+            'pnfdInfoIds': [],
+            'nestedNsdInfoIds': [],
+            'nsdOnboardingState': 'CREATED',
+            'onboardingFailureDetails': None,
+            'nsdOperationalState': 'DISABLED',
+            'nsdUsageState': 'NOT_IN_USE',
+            'userDefinedData': self.user_defined_data,
+            '_links': None
+        }
 
     def tearDown(self):
         pass
 
     def test_nsd_create_normal(self):
-        reqest_data = {
-            'userDefinedData': self.user_defined_data
-        }
+        reqest_data = {'userDefinedData': self.user_defined_data}
         expected_reponse_data = {
             'nsdOnboardingState': 'CREATED',
             'nsdOperationalState': 'DISABLED',
@@ -46,6 +61,7 @@ class TestNsDescriptor(TestCase):
             'userDefinedData': self.user_defined_data,
             '_links': None
         }
+
         response = self.client.post(
             '/api/nsd/v1/ns_descriptors',
             data=reqest_data,
@@ -57,41 +73,12 @@ class TestNsDescriptor(TestCase):
 
     def test_query_multiple_nsds_normal(self):
         expected_reponse_data = [
-            {
-                'id': '0',
-                'nsdId': None,
-                'nsdName': None,
-                'nsdVersion': None,
-                'nsdDesigner': None,
-                'nsdInvariantId': None,
-                'vnfPkgIds': [],
-                'pnfdInfoIds': [],
-                'nestedNsdInfoIds': [],
-                'nsdOnboardingState': 'CREATED',
-                'onboardingFailureDetails': None,
-                'nsdOperationalState': 'DISABLED',
-                'nsdUsageState': 'NOT_IN_USE',
-                'userDefinedData': self.user_defined_data,
-                '_links': None
-            },
-            {
-                'id': '1',
-                'nsdId': None,
-                'nsdName': None,
-                'nsdVersion': None,
-                'nsdDesigner': None,
-                'nsdInvariantId': None,
-                'vnfPkgIds': [],
-                'pnfdInfoIds': [],
-                'nestedNsdInfoIds': [],
-                'nsdOnboardingState': 'CREATED',
-                'onboardingFailureDetails': None,
-                'nsdOperationalState': 'DISABLED',
-                'nsdUsageState': 'NOT_IN_USE',
-                'userDefinedData': self.user_defined_data,
-                '_links': None
-            }
+            copy.deepcopy(self.expected_nsd_info),
+            copy.deepcopy(self.expected_nsd_info)
         ]
+        expected_reponse_data[0]['id'] = '0'
+        expected_reponse_data[1]['id'] = '1'
+
         user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
         for i in range(2):
             NSPackageModel(
@@ -101,28 +88,15 @@ class TestNsDescriptor(TestCase):
                 usageState='NOT_IN_USE',
                 userDefinedData=user_defined_data
            ).save()
+
        response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_nsd_normal(self):
-        expected_reponse_data = {
-            'id': '22',
-            'nsdId': None,
-            'nsdName': None,
-            'nsdVersion': None,
-            'nsdDesigner': None,
-            'nsdInvariantId': None,
-            'vnfPkgIds': [],
-            'pnfdInfoIds': [],
-            'nestedNsdInfoIds': [],
-            'nsdOnboardingState': 'CREATED',
-            'onboardingFailureDetails': None,
-            'nsdOperationalState': 'DISABLED',
-            'nsdUsageState': 'NOT_IN_USE',
-            'userDefinedData': self.user_defined_data,
-            '_links': None
-        }
+        expected_reponse_data = copy.deepcopy(self.expected_nsd_info)
+        expected_reponse_data['id'] = '22'
+
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)

        NSPackageModel(
            nsPackageId='22',
@@ -145,9 +119,10 @@ class TestNsDescriptor(TestCase):
             userDefinedData=user_defined_data,
             nsdModel='test'
         ).save()
-        resp = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
-        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
-        self.assertEqual({}, resp.data)
+
+        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
+        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+        self.assertEqual(None, response.data)
 
     def test_nsd_content_upload_normal(self):
         user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
@@ -157,6 +132,7 @@ class TestNsDescriptor(TestCase):
             usageState='NOT_IN_USE',
             userDefinedData=user_defined_data_json,
         ).save()
+
         with open('nsd_content.txt', 'wb') as fp:
             fp.write('test')
         with open('nsd_content.txt', 'rb') as fp:
@@ -169,7 +145,7 @@ class TestNsDescriptor(TestCase):
                 data = fp.read()
                 file_content = '%s%s' % (file_content, data)
         self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
-        self.assertEqual({}, resp.data)
+        self.assertEqual(None, resp.data)
         self.assertEqual(file_content, 'test')
         os.remove('nsd_content.txt')
 
@@ -188,6 +164,7 @@ class TestNsDescriptor(TestCase):
             onboardingState='ONBOARDED',
             localFilePath='nsd_content.txt'
         ).save()
+
         response = self.client.get(
             "/api/nsd/v1/ns_descriptors/23/nsd_content",
             RANGE='5-10',
diff --git a/catalog/packages/urls.py b/catalog/packages/urls.py
index 861f0ca7..28a60145 100644
--- a/catalog/packages/urls.py
+++ b/catalog/packages/urls.py
@@ -30,16 +30,15 @@ urlpatterns = [
     url(r'^api/nsd/v1/ns_descriptors$', ns_descriptor_views.ns_descriptors_rc, name='ns_descriptors_rc'),
     url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)$', ns_descriptor_views.ns_info_rd, name='ns_info_rd'),
     url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)/nsd_content$', ns_descriptor_views.nsd_content_ru, name='nsd_content_ru'),
+    # url(r'^api/nsd/v1/subscriptions', nsd_subscriptions.as_view(), name='subscriptions_rc'),
+    # url(r'^api/nsd/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', nsd_subscription.as_view(), name='subscription_rd'),
     # PNF
     url(r'^api/nsd/v1/pnf_descriptors$', pnf_descriptor_views.pnf_descriptors_rc, name='pnf_descriptors_rc'),
     url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)$', pnf_descriptor_views.pnfd_info_rd, name='pnfd_info_rd'),
     url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)/pnfd_content$', pnf_descriptor_views.pnfd_content_ru, name='pnfd_content_ru'),
-    # TODO SOL005 & SOL003
-
-    # url(r'^api/nsd/v1/subscriptions', nsd_subscriptions.as_view(), name='subscriptions_rc'),
-    # url(r'^api/nsd/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', nsd_subscription.as_view(), name='subscription_rd'),
+    # TODO SOL005 & SOL003
     url(r'^api/vnfpkgm/v1/vnf_packages$', vnf_package_views.vnf_packages_rc, name='vnf_packages_rc'),
     url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_views.vnf_package_rd, name='vnf_package_rd'),
     # url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/vnfd$', vnfd.as_view(), name='vnfd_r'),
diff --git a/catalog/packages/views/ns_descriptor_views.py b/catalog/packages/views/ns_descriptor_views.py
index 86785ac4..b9e12471 100644
--- a/catalog/packages/views/ns_descriptor_views.py
+++ b/catalog/packages/views/ns_descriptor_views.py
@@ -16,26 +16,23 @@ import logging
 import os
 import traceback
 
-from drf_yasg.utils import no_body, swagger_auto_schema
-from rest_framework import status
-from rest_framework.decorators import api_view
-from rest_framework.response import Response
-from django.http import FileResponse
 from django.http import StreamingHttpResponse
-
-from catalog.packages.biz.ns_descriptor import create, query_multiple, query_single, delete_single, upload, download
-from catalog.packages.serializers.create_nsd_info_request import \
-    CreateNsdInfoRequestSerializer
+from catalog.packages.biz.ns_descriptor import create, delete_single, download, query_multiple, query_single, upload
+from catalog.packages.serializers.create_nsd_info_request import CreateNsdInfoRequestSerializer
 from catalog.packages.serializers.nsd_info import NsdInfoSerializer
 from catalog.packages.serializers.nsd_infos import NsdInfosSerializer
 from catalog.pub.exceptions import CatalogException
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
 
 logger = logging.getLogger(__name__)
 
 
 @swagger_auto_schema(
     method='GET',
-    operation_description="Query an individual NS descriptor resource",
+    operation_description="Query a NSD",
     request_body=no_body,
     responses={
         status.HTTP_200_OK: NsdInfoSerializer(),
@@ -44,10 +41,10 @@ logger = logging.getLogger(__name__)
 )
 @swagger_auto_schema(
     method='DELETE',
-    operation_description="Delete an individual NS descriptor resource",
+    operation_description="Delete a NSD",
     request_body=no_body,
     responses={
-        status.HTTP_204_NO_CONTENT: {},
+        status.HTTP_204_NO_CONTENT: None,
         status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
     }
 )
@@ -56,32 +53,34 @@ def ns_info_rd(request, nsdInfoId):
     if request.method == 'GET':
         try:
             data = query_single(nsdInfoId)
-            nsd_info = NsdInfoSerializer(data=data)
-            if not nsd_info.is_valid():
-                raise CatalogException
+            nsd_info = validate_data(data, NsdInfoSerializer)
             return Response(data=nsd_info.data, status=status.HTTP_200_OK)
-        except CatalogException:
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Query of a NSD failed.'}
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(
-                data={'error': 'Query of an individual NS descriptor resource failed.'},
-                status=status.HTTP_500_INTERNAL_SERVER_ERROR
-            )
+            error_msg = {'error': 'Query of a NSD failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'DELETE':
         try:
-            data = delete_single(nsdInfoId)
-            return Response(data={}, status=status.HTTP_204_NO_CONTENT)
-        except CatalogException:
+            delete_single(nsdInfoId)
+            return Response(status=status.HTTP_204_NO_CONTENT)
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Deletion of a NSD failed.'}
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(
-                data={'error': 'Deletion of an individual NS descriptor resource failed.'},
-                status=status.HTTP_500_INTERNAL_SERVER_ERROR
-            )
+            error_msg = {'error': 'Deletion of a NSD failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
 @swagger_auto_schema(
     method='POST',
-    operation_description="Create an individual NS descriptor resource",
+    operation_description="Create a NSD",
     request_body=CreateNsdInfoRequestSerializer(),
     responses={
         status.HTTP_201_CREATED: NsdInfoSerializer(),
@@ -90,7 +89,7 @@ def ns_info_rd(request, nsdInfoId):
 )
 @swagger_auto_schema(
     method='GET',
-    operation_description="Query multiple NS descriptor resources",
+    operation_description="Query multiple NSDs",
     request_body=no_body,
     responses={
         status.HTTP_200_OK: NsdInfosSerializer(),
@@ -101,31 +100,32 @@ def ns_info_rd(request, nsdInfoId):
 def ns_descriptors_rc(request, *args, **kwargs):
     if request.method == 'POST':
         try:
-            create_nsd_info_requst = CreateNsdInfoRequestSerializer(data=request.data)
-            if not create_nsd_info_requst.is_valid():
-                raise CatalogException
+            create_nsd_info_requst = validate_data(request.data, CreateNsdInfoRequestSerializer)
             data = create(create_nsd_info_requst.data)
-            nsd_info = NsdInfoSerializer(data=data)
-            if not nsd_info.is_valid():
-                raise CatalogException
+            nsd_info = validate_data(data, NsdInfoSerializer)
             return Response(data=nsd_info.data, status=status.HTTP_201_CREATED)
-        except CatalogException:
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Creating a NSD failed.'}
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(data={'error': 'Creating nsd info failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Creating a NSD failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'GET':
         try:
             data = query_multiple()
-            nsd_infos = NsdInfosSerializer(data=data)
-            if not nsd_infos.is_valid():
-                raise CatalogException
+            nsd_infos = validate_data(data, NsdInfosSerializer)
             return Response(data=nsd_infos.data, status=status.HTTP_200_OK)
-        except CatalogException:
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Query of multiple NSDs failed.'}
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(
-                data={'error': 'Query of multiple NS descriptor resources failed.'},
-                status=status.HTTP_500_INTERNAL_SERVER_ERROR
-            )
+            error_msg = {'error': 'Query of multiple NSDs failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
 @swagger_auto_schema(
@@ -139,10 +139,10 @@ def ns_descriptors_rc(request, *args, **kwargs):
 )
 @swagger_auto_schema(
     method='GET',
-    operation_description="Fetch NSD content",
+    operation_description="Download NSD content",
     request_body=no_body,
     responses={
-        status.HTTP_204_NO_CONTENT: {},
+        status.HTTP_204_NO_CONTENT: None,
         status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
     }
 )
@@ -153,11 +153,12 @@ def nsd_content_ru(request, *args, **kwargs):
         files = request.FILES.getlist('file')
         try:
             upload(files[0], nsd_info_id)
-            return Response(data={}, status=status.HTTP_204_NO_CONTENT)
-        except IOError:
+            return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            raise CatalogException
-        return Response(data={'error': 'Uploading nsd content failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Uploading NSD content failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'GET':
         try:
@@ -165,26 +166,25 @@ def nsd_content_ru(request, *args, **kwargs):
             file_name = file_path.split('/')[-1]
             file_name = file_name.split('\\')[-1]
 
+            start, end = 0, os.path.getsize(file_path)
             file_range = request.META.get('RANGE')
             if file_range:
                 [start, end] = file_range.split('-')
                 start, end = start.strip(), end.strip()
                 start, end = int(start), int(end)
-                response = StreamingHttpResponse(
-                    read_partial_file(file_path, start, end),
-                    status=status.HTTP_200_OK
-                )
-                response['Content-Range'] = file_range
-            else:
-                response = FileResponse(open(file_path, 'rb'), status=status.HTTP_200_OK)
+            response = StreamingHttpResponse(
+                read_partial_file(file_path, start, end),
+                status=status.HTTP_200_OK
+            )
+            response['Content-Range'] = '%s-%s' % (start, end)
             response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
-            response['Content-Length'] = os.path.getsize(file_path)
+            response['Content-Length'] = end - start
             return response
-        except IOError:
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            raise CatalogException
-            return Response(data={'error': 'Downloading nsd content failed.'},
-                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Downloading NSD content failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
 def read_partial_file(file_path, start, end):
@@ -196,3 +196,11 @@ def read_partial_file(file_path, start, end):
         yield fp.read(CHUNK_SIZE)
         pos = fp.tell()
     yield fp.read(end - pos)
+
+
+def validate_data(data, serializer):
+    serialized_data = serializer(data=data)
+    if not serialized_data.is_valid():
+        logger.error('Data validation failed.')
+        raise CatalogException(serialized_data.errors)
+    return serialized_data
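
A minimal usage sketch of the exception-handling pattern this change introduces (illustrative only, not part of the commit): validation goes through the new validate_data() helper, the success Response is returned inside the try block, and both CatalogException and unexpected errors fall through to a single 500 Response. The view name example_nsd_query below is hypothetical; the other names come from the files touched by this change.

    import logging
    import traceback

    from rest_framework import status
    from rest_framework.decorators import api_view
    from rest_framework.response import Response

    from catalog.packages.biz.ns_descriptor import query_single
    from catalog.packages.serializers.nsd_info import NsdInfoSerializer
    from catalog.packages.views.ns_descriptor_views import validate_data
    from catalog.pub.exceptions import CatalogException

    logger = logging.getLogger(__name__)


    @api_view(http_method_names=['GET'])
    def example_nsd_query(request, nsdInfoId):
        # Hypothetical view mirroring the refactored ns_info_rd GET branch.
        try:
            data = query_single(nsdInfoId)                     # business layer raises CatalogException
            nsd_info = validate_data(data, NsdInfoSerializer)  # helper raises CatalogException on invalid data
            return Response(data=nsd_info.data, status=status.HTTP_200_OK)
        except CatalogException as e:
            logger.error(e.message)
            error_msg = {'error': 'Query of a NSD failed.'}
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            error_msg = {'error': 'Query of a NSD failed.'}
        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)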