NS and PNF descriptor refactoring. 73/63173/2
author laili <lai.li@zte.com.cn>
Tue, 28 Aug 2018 02:58:02 +0000 (10:58 +0800)
committer laili <lai.li@zte.com.cn>
Tue, 28 Aug 2018 03:08:06 +0000 (11:08 +0800)
Refactor exception handling and logging for PNFD operations.

Change-Id: Ic2871bc18060553ae5b9fd750dc29e285e86ab2a
Issue-ID: VFC-1037
Signed-off-by: laili <lai.li@zte.com.cn>
catalog/packages/biz/ns_descriptor.py
catalog/packages/biz/pnf_descriptor.py
catalog/packages/tests/test_pnf_descriptor.py
catalog/packages/views/ns_descriptor_views.py
catalog/packages/views/pnf_descriptor_views.py
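
For context, here is a minimal sketch of the exception-handling pattern the PNFD business and view code converges on in this change: serializer validation is funnelled through a validate_data() helper that raises CatalogException, and each view catches CatalogException separately from unexpected exceptions before returning a single 500 response. The serializer fields and the query_single() stub below are illustrative stand-ins rather than the committed code, and the sketch logs the exception object directly instead of the Python 2-only e.message attribute used in the diff.

import logging
import traceback

from rest_framework import serializers, status
from rest_framework.response import Response

logger = logging.getLogger(__name__)


class CatalogException(Exception):
    """Stand-in for catalog.pub.exceptions.CatalogException."""


class PnfdInfoSerializer(serializers.Serializer):
    # Reduced to a single field for the sketch.
    id = serializers.CharField()


def validate_data(data, serializer):
    # One helper replaces the repeated "is_valid() or raise" blocks in each view.
    serialized_data = serializer(data=data)
    if not serialized_data.is_valid():
        logger.error('Data validation failed: %s' % serialized_data.errors)
        raise CatalogException(serialized_data.errors)
    return serialized_data


def query_single(pnfd_info_id):
    # Stand-in for the business layer, which raises CatalogException on expected errors.
    return {'id': pnfd_info_id}


def pnfd_info_get(request, pnfd_info_id):
    try:
        pnfd_info = validate_data(query_single(pnfd_info_id), PnfdInfoSerializer)
        return Response(data=pnfd_info.data, status=status.HTTP_200_OK)
    except CatalogException as e:
        logger.error(e)                        # expected error: log the message only
        error_msg = {'error': 'Query of a PNFD failed.'}
    except Exception as e:
        logger.error(e)
        logger.error(traceback.format_exc())   # unexpected error: keep the traceback
        error_msg = {'error': 'Query of a PNFD failed.'}
    return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

The split between the two except branches keeps expected catalog errors out of the traceback log while still recording full tracebacks for genuinely unexpected failures.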

index c63304b..424406c 100644 (file)
@@ -120,7 +120,7 @@ def process(nsd_info_id, local_file_name):
 
     nsd_id = nsd["metadata"]["id"]
     if nsd_id and NSPackageModel.objects.filter(nsdId=nsd_id):
-        logger.info('"NSD(%s) already exists." % nsd_id')
+        logger.info('NSD(%s) already exists.' % nsd_id)
         raise CatalogException("NSD(%s) already exists." % nsd_id)
 
     for vnf in nsd["vnfs"]:
@@ -149,9 +149,11 @@ def download(nsd_info_id):
     logger.info('Start to download NSD(%s)...' % nsd_info_id)
     ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
     if not ns_pkgs.exists():
-        raise CatalogException('The NS Descriptor (%s) does not exist.' % nsd_info_id)
+        logger.error('NSD(%s) does not exist.' % nsd_info_id)
+        raise CatalogException('NSD(%s) does not exist.' % nsd_info_id)
     if ns_pkgs[0].onboardingState != 'ONBOARDED':
-        raise CatalogException('The NS Descriptor (%s) is not ONBOARDED.' % nsd_info_id)
+        logger.error('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+        raise CatalogException('NSD(%s) is not ONBOARDED.' % nsd_info_id)
     local_file_path = ns_pkgs[0].localFilePath
     logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
     return local_file_path
index 4d11e4e..0e2a477 100644 (file)
@@ -19,16 +19,16 @@ import os
 import uuid
 
 from catalog.pub.config.config import CATALOG_ROOT_PATH
-from catalog.pub.utils import fileutil
-from catalog.pub.utils.values import ignore_case_get
 from catalog.pub.database.models import NSPackageModel, PnfPackageModel
 from catalog.pub.exceptions import CatalogException
-from catalog.pub.utils import toscaparser
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.pub.utils.values import ignore_case_get
 
 logger = logging.getLogger(__name__)
 
 
 def create(data):
+    logger.info('Start to create a PNFD...')
     user_defined_data = ignore_case_get(data, 'userDefinedData')
     data = {
         'id': str(uuid.uuid4()),
@@ -43,13 +43,15 @@ def create(data):
         usageState=data['pnfdUsageState'],
         userDefinedData=data['userDefinedData']
     ).save()
+    logger.info('A PNFD(%s) has been created.' % data['id'])
     return data
 
 
 def query_multiple():
     pnf_pkgs = PnfPackageModel.objects.all()
     if not pnf_pkgs.exists():
-        raise CatalogException('PNF descriptors do not exist.')
+        logger.error('PNFDs do not exist.')
+        raise CatalogException('PNFDs do not exist.')
     response_data = []
     for pnf_pkg in pnf_pkgs:
         data = fill_response_data(pnf_pkg)
@@ -60,17 +62,41 @@ def query_multiple():
 def query_single(pnfd_info_id):
     pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
     if not pnf_pkgs.exists():
-        raise CatalogException('PNF descriptor (%s) does not exist.' % pnfd_info_id)
+        logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+        raise CatalogException('PNFD(%s) does not exist.' % pnfd_info_id)
     return fill_response_data(pnf_pkgs[0])
 
 
+def upload(remote_file, pnfd_info_id):
+    logger.info('Start to upload PNFD(%s)...' % pnfd_info_id)
+    pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+    if not pnf_pkgs.exists():
+        logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+        raise CatalogException('PNFD(%s) does not exist.' % pnfd_info_id)
+
+    pnf_pkgs.update(onboardingState='UPLOADING')
+    local_file_name = remote_file.name  # TODO: common method
+    local_file_dir = os.path.join(CATALOG_ROOT_PATH, pnfd_info_id)
+    local_file_name = os.path.join(local_file_dir, local_file_name)
+    if not os.path.exists(local_file_dir):
+        fileutil.make_dirs(local_file_dir)
+    with open(local_file_name, 'wb') as local_file:
+        for chunk in remote_file.chunks(chunk_size=1024 * 8):
+            local_file.write(chunk)
+    logger.info('PNFD(%s) content has been uploaded.' % pnfd_info_id)
+
+
 def process(pnfd_info_id, local_file_name):  # TODO: onboardingState changes
+    logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
+    pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+    pnf_pkgs[0].onboardingState = 'PROCESSING'  # TODO: if failed, should be set to created
     pnfd_json = toscaparser.parse_pnfd(local_file_name)
     pnfd = json.JSONDecoder().decode(pnfd_json)
 
     pnfd_id = pnfd["metadata"]["id"]
-    if pnfd_id and PnfPackageModel.objects.filter(pnfdId=pnfd_id):  # pnfd_id may not exist
-        raise CatalogException("NS Descriptor (%s) already exists." % pnfd_id)
+    if pnfd_id and PnfPackageModel.objects.filter(pnfdId=pnfd_id):
+        logger.info('PNFD(%s) already exists.' % pnfd_id)
+        raise CatalogException("PNFD(%s) already exists." % pnfd_id)
 
     PnfPackageModel(
         pnfPackageId=pnfd_info_id,
@@ -84,46 +110,33 @@ def process(pnfd_info_id, local_file_name):  # TODO: onboardingState changes
         localFilePath=local_file_name,
         pnfdModel=pnfd_json
     ).save()
-
-
-def upload(files, pnfd_info_id):
-    ns_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
-    if not ns_pkgs.exists():
-        raise CatalogException('The NS descriptor (%s) does not exist.' % pnfd_info_id)
-
-    remote_files = files
-    for remote_file in remote_files:
-        local_file_name = remote_file.name
-        local_file_dir = os.path.join(CATALOG_ROOT_PATH, pnfd_info_id)
-        local_file_name = os.path.join(local_file_dir, local_file_name)
-        if not os.path.exists(local_file_dir):
-            fileutil.make_dirs(local_file_dir)
-        with open(local_file_name, 'wb') as local_file:
-            if remote_file.multiple_chunks(chunk_size=None):  # TODO: chunk_size
-                for chunk in remote_file.chunks():
-                    local_file.write(chunk)
-            else:
-                data = remote_file.read()
-                local_file.write(data)
+    logger.info('PNFD(%s) has been processed.' % pnfd_info_id)
 
 
 def download(pnfd_info_id):
+    logger.info('Start to download PNFD(%s)...' % pnfd_info_id)
     pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
     if not pnf_pkgs.exists():
-        raise CatalogException('The PNF Descriptor (%s) does not exist.' % pnfd_info_id)
+        logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+        raise CatalogException('PNFD(%s) does not exist.' % pnfd_info_id)
     if pnf_pkgs[0].onboardingState != 'ONBOARDED':
-        raise CatalogException('The PNF Descriptor (%s) is not ONBOARDED.' % pnfd_info_id)
+        logger.error('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+        raise CatalogException('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
     local_file_path = pnf_pkgs[0].localFilePath
+    logger.info('PNFD(%s) has been downloaded.' % pnfd_info_id)
     return local_file_path
 
 
-def delete_pnf(pnfd_info_id):
+def delete_single(pnfd_info_id):
+    logger.info('Start to delete PNFD(%s)...' % pnfd_info_id)
     pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
     if not pnf_pkgs.exists():
-        logger.debug('PNF descriptor (%s) is deleted.' % pnfd_info_id)
+        logger.info('PNFD(%s) is deleted.' % pnfd_info_id)
         return
+
     if pnf_pkgs[0].usageState != 'NOT_IN_USE':
-        raise CatalogException('The PNF descriptor (%s) shall be NOT_IN_USE.' % pnfd_info_id)
+        logger.info('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+        raise CatalogException('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
     ns_pkgs = NSPackageModel.objects.all()
     for ns_pkg in ns_pkgs:
         pnf_info_ids = []
@@ -133,12 +146,13 @@ def delete_pnf(pnfd_info_id):
             for pkg in pkgs:
                 pnf_info_ids.append(pkg.pnfPackageId)
         if pnfd_info_id in pnf_info_ids:
-            raise CatalogException('The PNF descriptor (%s) is referenced.' % pnfd_info_id)
-            break
+            logger.info('PNFD(%s) is referenced.' % pnfd_info_id)
+            raise CatalogException('PNFD(%s) is referenced.' % pnfd_info_id)
+
     pnf_pkgs.delete()
     vnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, pnfd_info_id)
     fileutil.delete_dirs(vnf_pkg_path)
-    logger.debug('PNF descriptor (%s) is deleted.' % pnfd_info_id)
+    logger.debug('PNFD(%s) has been deleted.' % pnfd_info_id)
 
 
 def fill_response_data(pnf_pkg):
index 4245645..0fc9c65 100644 (file)
@@ -173,7 +173,7 @@ class TestPnfDescriptor(TestCase):
                 {'file': fp},
             )
         self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
-        self.assertEqual({}, resp.data)
+        self.assertEqual(None, resp.data)
 
         os.remove('pnfd_content.txt')
 
index b9e1247..19f9d1d 100644 (file)
@@ -49,7 +49,7 @@ logger = logging.getLogger(__name__)
     }
 )
 @api_view(http_method_names=['GET', 'DELETE'])
-def ns_info_rd(request, nsdInfoId):
+def ns_info_rd(request, nsdInfoId):  # TODO
     if request.method == 'GET':
         try:
             data = query_single(nsdInfoId)
@@ -133,7 +133,7 @@ def ns_descriptors_rc(request, *args, **kwargs):
     operation_description="Upload NSD content",
     request_body=no_body,
     responses={
-        status.HTTP_204_NO_CONTENT: {},
+        status.HTTP_204_NO_CONTENT: None,
         status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
     }
 )
index b571f01..bf7f1cd 100644 (file)
 # limitations under the License.
 
 import logging
+import os
 import traceback
 
-from drf_yasg.utils import no_body, swagger_auto_schema
-from rest_framework import status
-from rest_framework.decorators import api_view
-from rest_framework.response import Response
 from django.http import FileResponse
 
-
-from catalog.packages.biz.pnf_descriptor import create, query_multiple, query_single, upload, download, delete_pnf
-from catalog.packages.serializers.create_pnfd_info_request import \
-    CreatePnfdInfoRequestSerializer
+from catalog.packages.biz.pnf_descriptor import create, delete_single, download, query_multiple, query_single, upload
+from catalog.packages.serializers.create_pnfd_info_request import CreatePnfdInfoRequestSerializer
 from catalog.packages.serializers.pnfd_info import PnfdInfoSerializer
 from catalog.packages.serializers.pnfd_infos import PnfdInfosSerializer
 from catalog.pub.exceptions import CatalogException
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
 
 logger = logging.getLogger(__name__)
 
 
 @swagger_auto_schema(
     method='GET',
-    operation_description="Query an individual PNF descriptor resource",
+    operation_description="Query a PNFD",
     request_body=no_body,
     responses={
         status.HTTP_200_OK: PnfdInfoSerializer(),
@@ -43,7 +42,7 @@ logger = logging.getLogger(__name__)
 )
 @swagger_auto_schema(
     method='DELETE',
-    operation_description="Delete an individual PNF descriptor resource",
+    operation_description="Delete a PNFD",
     request_body=no_body,
     responses={
         status.HTTP_204_NO_CONTENT: None,
@@ -51,42 +50,40 @@ logger = logging.getLogger(__name__)
     }
 )
 @api_view(http_method_names=['GET', 'DELETE'])
-def pnfd_info_rd(request, pnfdInfoId):
+def pnfd_info_rd(request, pnfdInfoId):  # TODO
     if request.method == 'GET':
         logger.debug("Query an individual PNF descriptor> %s" % request.data)
         try:
-            res = query_single(pnfdInfoId)
-            query_serializer = PnfdInfoSerializer(data=res)
-            if not query_serializer.is_valid():
-                raise CatalogException
-            return Response(data=query_serializer.data, status=status.HTTP_200_OK)
-        except CatalogException:
-            logger.error(traceback.format_exc())
-            return Response(data={'error': 'Query an individual PNF descriptor failed.'},
-                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            data = query_single(pnfdInfoId)
+            pnfd_info = validate_data(data, PnfdInfoSerializer)
+            return Response(data=pnfd_info.data, status=status.HTTP_200_OK)
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Query of a PNFD failed.'}
         except Exception as e:
             logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Query of a PNFD failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'DELETE':
         logger.debug("Delete an individual PNFD resource> %s" % request.data)
         try:
-            delete_pnf(pnfdInfoId)
+            delete_single(pnfdInfoId)
             return Response(data=None, status=status.HTTP_204_NO_CONTENT)
-        except CatalogException:
-            logger.error(traceback.format_exc())
-            return Response(data={'error': 'Delete an individual PNFD resource failed.'},
-                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Deletion of a PNFD failed.'}
         except Exception as e:
             logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Deletion of a PNFD failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
 @swagger_auto_schema(
     method='POST',
-    operation_description="Create an individual PNF descriptor resource",
+    operation_description="Create a  PNFD",
     request_body=CreatePnfdInfoRequestSerializer(),
     responses={
         status.HTTP_201_CREATED: PnfdInfoSerializer(),
@@ -95,7 +92,7 @@ def pnfd_info_rd(request, pnfdInfoId):
 )
 @swagger_auto_schema(
     method='GET',
-    operation_description="Query multiple PNF descriptor resources",
+    operation_description="Query multiple PNFDs",
     request_body=no_body,
     responses={
         status.HTTP_200_OK: PnfdInfosSerializer(),
@@ -106,31 +103,32 @@ def pnfd_info_rd(request, pnfdInfoId):
 def pnf_descriptors_rc(request, *args, **kwargs):
     if request.method == 'POST':
         try:
-            create_pnfd_info_request = CreatePnfdInfoRequestSerializer(data=request.data)
-            if not create_pnfd_info_request.is_valid():
-                raise CatalogException
+            create_pnfd_info_request = validate_data(request.data, CreatePnfdInfoRequestSerializer)
             data = create(create_pnfd_info_request.data)
-            pnfd_info = PnfdInfoSerializer(data=data)
-            if not pnfd_info.is_valid():
-                raise CatalogException
+            pnfd_info = validate_data(data, PnfdInfoSerializer)
             return Response(data=pnfd_info.data, status=status.HTTP_201_CREATED)
-        except CatalogException:
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Creating a PNFD failed.'}
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(data={'error': 'Creating pnfd info failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Creating a PNFD failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'GET':
         try:
             data = query_multiple()
-            pnfd_infos = PnfdInfosSerializer(data=data)
-            if not pnfd_infos.is_valid():
-                raise CatalogException
+            pnfd_infos = validate_data(data, PnfdInfosSerializer)
             return Response(data=pnfd_infos.data, status=status.HTTP_200_OK)
-        except CatalogException:
+        except CatalogException as e:
+            logger.error(e.message)
+            error_msg = {'error': 'Query of multiple PNFDs failed.'}
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            return Response(
-                data={'error': 'Query of multiple PNF descriptor resources failed.'},
-                status=status.HTTP_500_INTERNAL_SERVER_ERROR
-            )
+            error_msg = {'error': 'Query of multiple PNFDs failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
 @swagger_auto_schema(
@@ -138,7 +136,7 @@ def pnf_descriptors_rc(request, *args, **kwargs):
     operation_description="Upload PNFD content",
     request_body=no_body,
     responses={
-        status.HTTP_204_NO_CONTENT: {},
+        status.HTTP_204_NO_CONTENT: None,
         status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
     }
 )
@@ -147,7 +145,7 @@ def pnf_descriptors_rc(request, *args, **kwargs):
     operation_description="Fetch PNFD content",
     request_body=no_body,
     responses={
-        status.HTTP_204_NO_CONTENT: {},
+        status.HTTP_204_NO_CONTENT: 'PNFD file',
         status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
     }
 )
@@ -157,12 +155,13 @@ def pnfd_content_ru(request, *args, **kwargs):
     if request.method == 'PUT':
         files = request.FILES.getlist('file')
         try:
-            upload(files, pnfd_info_id)
-            return Response(data={}, status=status.HTTP_204_NO_CONTENT)
-        except IOError:
+            upload(files[0], pnfd_info_id)
+            return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            raise CatalogException
-            return Response(data={'error': 'Uploading pnfd content failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Uploading PNFD content failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     if request.method == 'GET':
         try:
@@ -171,9 +170,18 @@ def pnfd_content_ru(request, *args, **kwargs):
             file_name = file_name.split('\\')[-1]
             response = FileResponse(open(file_path, 'rb'), status=status.HTTP_200_OK)
             response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
+            response['Content-Length'] = os.path.getsize(file_path)
             return response
-        except IOError:
+        except Exception as e:
+            logger.error(e.message)
             logger.error(traceback.format_exc())
-            raise CatalogException
-            return Response(data={'error': 'Downloading pnfd content failed.'},
-                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            error_msg = {'error': 'Downloading PNFD content failed.'}
+        return Response(data=error_msg, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+def validate_data(data, serializer):
+    serialized_data = serializer(data=data)
+    if not serialized_data.is_valid():
+        logger.error('Data validation failed: %s' % serialized_data.errors)
+        raise CatalogException(serialized_data.errors)
+    return serialized_data