From: Fu Jinhua Date: Wed, 27 Sep 2017 01:13:52 +0000 (+0000) Subject: Merge "edit activity workflow plan for NS INIT" X-Git-Tag: v1.0.0~28 X-Git-Url: https://gerrit.onap.org/r/gitweb?a=commitdiff_plain;h=12f8357386eefb363bdedd8d651bc914f537cfcb;hp=f4586a7718f10100e4cb76c57d98a96edbd07fca;p=vfc%2Fnfvo%2Flcm.git Merge "edit activity workflow plan for NS INIT" --- diff --git a/lcm/ns/vls/create_vls.py b/lcm/ns/vls/create_vls.py index 0065ff68..1d89c3d0 100644 --- a/lcm/ns/vls/create_vls.py +++ b/lcm/ns/vls/create_vls.py @@ -158,7 +158,17 @@ class CreateVls(object): "vlanTransparent": str(self.vl_properties.get("vlan_transparent", "")), "routerExternal": self.route_external, "resourceProviderType": "", - "resourceProviderId": ""} + "resourceProviderId": "", + "subnet_list": [{ + "subnet_name": self.vl_properties.get("name", ""), + "cidr": self.vl_properties.get("cidr", "192.168.0.0/24"), + "ip_version": self.vl_properties.get("ip_version", const.IPV4), + "enable_dhcp": self.vl_properties.get("dhcp_enabled", False), + "gateway_ip": self.vl_properties.get("gateway_ip", ""), + "dns_nameservers": self.vl_properties.get("dns_nameservers", ""), + "host_routes": self.vl_properties.get("host_routes", "") + }] + } resmgr.create_vl(req_param) def create_vl_inst_id_in_vnffg(self): diff --git a/lcm/ns/vnfs/create_vnfs.py b/lcm/ns/vnfs/create_vnfs.py index 0af4c6d0..7a17a4b5 100644 --- a/lcm/ns/vnfs/create_vnfs.py +++ b/lcm/ns/vnfs/create_vnfs.py @@ -16,8 +16,8 @@ import logging import traceback import uuid from threading import Thread -from lcm.ns.const import OWNER_TYPE +from lcm.ns.const import OWNER_TYPE from lcm.ns.vnfs.const import VNF_STATUS, NFVO_VNF_INST_TIMEOUT_SECOND, INST_TYPE, INST_TYPE_NAME from lcm.ns.vnfs.wait_job import wait_job_finish from lcm.pub.config.config import REPORT_TO_AAI @@ -279,16 +279,39 @@ class CreateVnfs(Thread): "vnf-type": "vnf-type-test111", "service-id": self.ns_inst_id, "in-maint": True, - "is-closed-loop-disabled": False + "is-closed-loop-disabled": False, + "relationship-list": { + "relationship": [ + { + "related-to": "service-instance", + "relationship-data": [ + { + "relationship-key": "customer.global-customer-id", + "relationship-value": "global-customer-id-" + self.ns_inst_id + }, + { + "relationship-key": "service-subscription.service-type", + "relationship-value": "service-type-" + self.ns_inst_id + }, + { + "relationship-key": "service-instance.service-instance-id", + "relationship-value": self.ns_inst_id + } + ] + } + ] + } } resp_data, resp_status = create_vnf_aai(self.nf_inst_id, data) if resp_data: - logger.debug("Fail to create vnf instance[%s] to aai, resp_status: [%s]." % (self.nf_inst_id, resp_status)) + logger.debug("Fail to create vnf instance[%s] to aai, vnf instance=[%s], resp_status: [%s]." + % (self.nf_inst_id, self.ns_inst_id, resp_status)) else: - logger.debug("Success to create vnf instance[%s] to aai, resp_status: [%s]." % (self.nf_inst_id, resp_status)) + logger.debug("Success to create vnf instance[%s] to aai, vnf instance=[%s], resp_status: [%s]." 
+ % (self.nf_inst_id, self.ns_inst_id, resp_status)) def create_vserver_in_aai(self): - logger.debug("create_vserver_in_aai start!") + logger.debug("CreateVnfs::create_vserver_in_aai::report vserver instance to aai.") cloud_owner, cloud_region_id = split_vim_to_owner_region(self.vim_id) # query vim_info from aai @@ -303,16 +326,27 @@ class CreateVnfs(Thread): "prov-status": "ACTIVE", "vserver-selflink": "", "in-maint": True, - "is-closed-loop-disabled": False + "is-closed-loop-disabled": False, + "relationship-list": { + "relationship": [ + { + "related-to": "generic-vnf", + "relationship-data": [ + { + "relationship-key": "generic-vnf.vnf-id", + "relationship-value": self.nf_inst_id + } + ] + } + ] + } } # create vserver instance in aai resp_data, resp_status = create_vserver_aai(cloud_owner, cloud_region_id, tenant_id, vserver_id, data) if resp_data: - logger.debug( - "Fail to create vserver instance[%s] to aai, resp_status: [%s]." % (vserver_id, resp_status)) + logger.debug("Fail to create vserver instance[%s] to aai, vnf instance=[%s], resp_status: [%s]." + % (vserver_id, self.nf_inst_id, resp_status)) else: - logger.debug( - "Success to create vserver instance[%s] to aai, resp_status: [%s]." % (vserver_id, resp_status)) - - logger.debug("create_vserver_in_aai end!") + logger.debug("Success to create vserver instance[%s] to aai, vnf instance=[%s], resp_status: [%s]." + % (vserver_id, self.nf_inst_id, resp_status)) diff --git a/lcm/pub/utils/toscaparser/basemodel.py b/lcm/pub/utils/toscaparser/basemodel.py index c912b1f7..b80b275f 100644 --- a/lcm/pub/utils/toscaparser/basemodel.py +++ b/lcm/pub/utils/toscaparser/basemodel.py @@ -15,6 +15,7 @@ import copy import ftplib import json +import logging import os import re import shutil @@ -26,6 +27,8 @@ from toscaparser.tosca_template import ToscaTemplate from lcm.pub.utils.toscaparser.dataentityext import DataEntityExt +logger = logging.getLogger(__name__) + class BaseInfoModel(object): @@ -36,17 +39,17 @@ class BaseInfoModel(object): valid_params = self._validate_input_params(file_name, params) return self._create_tosca_template(file_name, valid_params) finally: - if file_name != None and file_name != path and os.path.exists(file_name): + if file_name is not None and file_name != path and os.path.exists(file_name): try: os.remove(file_name) - except Exception, e: - pass + except Exception as e: + logger.error("Failed to parse package, error: %s", e.message) def _validate_input_params(self, path, params): valid_params = {} if params and len(params) > 0: tmp = self._create_tosca_template(path, None) - for key,value in params.items(): + for key, value in params.items(): if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0: for input_def in tmp.inputs: if (input_def.name == key): @@ -63,11 +66,11 @@ class BaseInfoModel(object): print "-----------------------------" return tosca_tpl finally: - if tosca_tpl != None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir): + if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir): try: shutil.rmtree(tosca_tpl.temp_dir) - except Exception, e: - pass + except Exception as e: + logger.error("Failed to create tosca template, error: %s", e.message) def _check_download_file(self, path): if (path.startswith("ftp") or path.startswith("sftp")): @@ -126,10 +129,9 @@ class BaseInfoModel(object): sftp = paramiko.SFTPClient.from_transport(t) sftp.get(remoteFileName, localFileName) finally: - if t != None: + if t is not None: t.close() - def ftp_get(self, 
userName, userPwd, hostIp, hostPort, remoteFileName, localFileName): f = None try: @@ -140,7 +142,7 @@ class BaseInfoModel(object): ftp.retrbinary('RETR ' + remoteFileName, f.write, 1024) f.close() finally: - if f != None: + if f is not None: f.close() def buidMetadata(self, tosca): @@ -164,7 +166,6 @@ class BaseInfoModel(object): properties[k] = str(item) return properties - def verify_properties(self, props, inputs, parsed_params): ret_props = {} if (props and len(props) > 0): @@ -189,26 +190,24 @@ class BaseInfoModel(object): if (isinstance(req_value, dict)): if ('node' in req_value and req_value['node'] not in node_template.templates): continue # No target requirement for aria parser, not add to result. - rets.append({req_name : req_value}) + rets.append({req_name: req_value}) return rets def buildCapabilities(self, nodeTemplate, inputs, ret): capabilities = json.dumps(nodeTemplate.entity_tpl.get('capabilities', None)) - match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}',capabilities) + match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', capabilities) for m in match: - aa= [input_def for input_def in inputs - if m == input_def.name][0] - capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities,1) + aa = [input_def for input_def in inputs if m == input_def.name][0] + capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities, 1) if capabilities != 'null': ret['capabilities'] = json.loads(capabilities) def buildArtifacts(self, nodeTemplate, inputs, ret): artifacts = json.dumps(nodeTemplate.entity_tpl.get('artifacts', None)) - match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}',artifacts) + match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', artifacts) for m in match: - aa= [input_def for input_def in inputs - if m == input_def.name][0] - artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts,1) + aa = [input_def for input_def in inputs if m == input_def.name][0] + artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts, 1) if artifacts != 'null': ret['artifacts'] = json.loads(artifacts) @@ -227,8 +226,9 @@ class BaseInfoModel(object): return node['nodeType'].upper().find('.CP.') >= 0 or node['nodeType'].upper().endswith('.CP') def isVl(self, node): - return node['nodeType'].upper().find('.VIRTUALLINK.') >= 0 or node['nodeType'].upper().find('.VL.') >= 0 or \ - node['nodeType'].upper().endswith('.VIRTUALLINK') or node['nodeType'].upper().endswith('.VL') + isvl = node['nodeType'].upper().find('.VIRTUALLINK.') >= 0 or node['nodeType'].upper().find('.VL.') >= 0 + isvl = isvl or node['nodeType'].upper().endswith('.VIRTUALLINK') or node['nodeType'].upper().endswith('.VL') + return isvl def isService(self, node): return node['nodeType'].upper().find('.SERVICE.') >= 0 or node['nodeType'].upper().endswith('.SERVICE') @@ -252,7 +252,6 @@ class BaseInfoModel(object): def getVirtualbindings(self, node): return self.getRequirementByName(node, 'virtualbinding') - def getRequirementByName(self, node, requirementName): requirements = [] if 'requirements' in node: @@ -267,8 +266,8 @@ class BaseInfoModel(object): if 'requirements' in node: for item in node['requirements']: for key, value in item.items(): - if key.upper().find('VIRTUALLINK') >=0: - rets.append({"key_name":key, "vl_id":self.get_requirement_node_name(value)}) + if key.upper().find('VIRTUALLINK') >= 0: + rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)}) return rets 
def _verify_value(self, value, inputs, parsed_params): diff --git a/lcm/pub/utils/toscaparser/dataentityext.py b/lcm/pub/utils/toscaparser/dataentityext.py index 6ca20668..825e93bb 100644 --- a/lcm/pub/utils/toscaparser/dataentityext.py +++ b/lcm/pub/utils/toscaparser/dataentityext.py @@ -16,8 +16,8 @@ from toscaparser.dataentity import DataEntity from toscaparser.elements.constraints import Schema from toscaparser.common.exception import ExceptionCollector -class DataEntityExt(object): +class DataEntityExt(object): '''A complex data value entity ext.''' @staticmethod def validate_datatype(type, value, entry_schema=None, custom_def=None): @@ -29,6 +29,5 @@ class DataEntityExt(object): return float(value) except Exception: ExceptionCollector.appendException(ValueError(('"%s" is not an float.') % value)) - return DataEntity.validate_datatype(type, value, entry_schema, custom_def) return value diff --git a/lcm/pub/utils/toscaparser/nsdmodel.py b/lcm/pub/utils/toscaparser/nsdmodel.py index e13f0265..9792dd99 100644 --- a/lcm/pub/utils/toscaparser/nsdmodel.py +++ b/lcm/pub/utils/toscaparser/nsdmodel.py @@ -44,7 +44,6 @@ class EtsiNsdInfoModel(BaseInfoModel): self.ns_flavours = self.get_all_flavour(tosca.topology_template.groups) self.nested_ns = self.get_all_nested_ns(nodeTemplates) - def buildInputs(self, top_inputs): ret = {} for tmpinput in top_inputs: @@ -57,7 +56,7 @@ class EtsiNsdInfoModel(BaseInfoModel): return ret def buildNode(self, nodeTemplate, inputs, parsed_params): - ret ={} + ret = {} ret['name'] = nodeTemplate.name ret['nodeType'] = nodeTemplate.type if 'description' in nodeTemplate.entity_tpl: @@ -70,7 +69,8 @@ class EtsiNsdInfoModel(BaseInfoModel): self.buildCapabilities(nodeTemplate, inputs, ret) self.buildArtifacts(nodeTemplate, inputs, ret) interfaces = self.build_interfaces(nodeTemplate) - if interfaces: ret['interfaces'] = interfaces + if interfaces: + ret['interfaces'] = interfaces return ret def _get_all_vnf(self, nodeTemplates): @@ -111,7 +111,7 @@ class EtsiNsdInfoModel(BaseInfoModel): for key, value in item.items(): if key.upper().startswith('VIRTUALBINDING'): req_node_name = self.get_requirement_node_name(value) - if req_node_name != None and req_node_name == node['name']: + if req_node_name is not None and req_node_name == node['name']: cps.append(tmpnode) return cps @@ -171,9 +171,9 @@ class EtsiNsdInfoModel(BaseInfoModel): cp_vl = {} cp_vl['vl_id'] = self.get_prop_from_obj(req, 'node') relationship = self.get_prop_from_obj(req, 'relationship') - if relationship != None: + if relationship is not None: properties = self.get_prop_from_obj(relationship, 'properties') - if properties != None and isinstance(properties, dict): + if properties is not None and isinstance(properties, dict): for key, value in properties.items(): cp_vl[key] = value return cp_vl diff --git a/lcm/pub/utils/toscaparser/vnfdmodel.py b/lcm/pub/utils/toscaparser/vnfdmodel.py index f26ec967..a665efe7 100644 --- a/lcm/pub/utils/toscaparser/vnfdmodel.py +++ b/lcm/pub/utils/toscaparser/vnfdmodel.py @@ -47,7 +47,6 @@ class EtsiVnfdInfoModel(EtsiNsdInfoModel): self.vnf_exposed = self.get_all_endpoint_exposed(tosca.topology_template) self.vnf_flavours = self.get_all_flavour(tosca.topology_template.groups) - def _get_all_services(self, nodeTemplates): ret = [] for node in nodeTemplates: @@ -177,13 +176,13 @@ class EtsiVnfdInfoModel(EtsiNsdInfoModel): ret['dependencies'] = map(lambda x: self.get_requirement_node_name(x), self.getNodeDependencys(node)) nfv_compute = self.getCapabilityByName(node, 
'nfv_compute') - if nfv_compute != None and 'properties' in nfv_compute: + if nfv_compute is not None and 'properties' in nfv_compute: ret['nfv_compute'] = nfv_compute['properties'] ret['vls'] = self.get_linked_vl_ids(node, nodeTemplates) scalable = self.getCapabilityByName(node, 'scalable') - if scalable != None and 'properties' in scalable: + if scalable is not None and 'properties' in scalable: ret['scalable'] = scalable['properties'] ret['cps'] = self.getVirtalBindingCpIds(node, nodeTemplates) @@ -296,8 +295,8 @@ class EtsiVnfdInfoModel(EtsiNsdInfoModel): policies = [] scaling_policies = self.get_scaling_policies(top_policies) healing_policies = self.get_healing_policies(top_policies) - policies.append({"scaling":scaling_policies, 'healing':healing_policies}) + policies.append({"scaling": scaling_policies, 'healing': healing_policies}) return policies def get_healing_policies(self, top_policies): - return self.get_policies_by_keyword(top_policies,'.HEALING') + return self.get_policies_by_keyword(top_policies, '.HEALING') diff --git a/lcm/pub/utils/values.py b/lcm/pub/utils/values.py index 27d71a53..10700d02 100644 --- a/lcm/pub/utils/values.py +++ b/lcm/pub/utils/values.py @@ -22,4 +22,3 @@ def ignore_case_get(args, key, def_val=""): if old_key.upper() == key.upper(): return args[old_key] return def_val - diff --git a/lcm/samples/views.py b/lcm/samples/views.py index 0e3c6acf..53153d0b 100644 --- a/lcm/samples/views.py +++ b/lcm/samples/views.py @@ -15,11 +15,9 @@ import logging import traceback -from rest_framework.views import APIView -from rest_framework.response import Response from rest_framework import status -from lcm.pub.database import models - +from rest_framework.response import Response +from rest_framework.views import APIView logger = logging.getLogger(__name__) @@ -32,6 +30,7 @@ class SampleList(APIView): logger.debug("get") return Response({"status": "active"}) + class TablesList(APIView): def delete(self, request, modelName): logger.debug("Start delete model %s", modelName) @@ -47,7 +46,6 @@ class TablesList(APIView): status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(data={}, status=status.HTTP_204_NO_CONTENT) - def get(self, request, modelName): logger.debug("Get model %s", modelName) count = 0 @@ -59,7 +57,3 @@ class TablesList(APIView): return Response(data={"error": "failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(data={"count": count}, status=status.HTTP_200_OK) - - - - diff --git a/lcm/swagger/tests.py b/lcm/swagger/tests.py index 54551faf..4f2e6522 100644 --- a/lcm/swagger/tests.py +++ b/lcm/swagger/tests.py @@ -10,7 +10,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
import unittest -import json + from django.test import Client from rest_framework import status diff --git a/lcm/swagger/views.py b/lcm/swagger/views.py index fdef8aed..5034905c 100644 --- a/lcm/swagger/views.py +++ b/lcm/swagger/views.py @@ -14,20 +14,16 @@ import json import logging import os -import traceback -from rest_framework import status from rest_framework.response import Response from rest_framework.views import APIView - logger = logging.getLogger(__name__) class SwaggerJsonView(APIView): def get(self, request): - json_file = os.path.join(os.path.dirname(__file__), 'vfc.nslcm.swagger.json') f = open(json_file) json_data = json.JSONDecoder().decode(f.read()) @@ -49,7 +45,6 @@ class SwaggerJsonView(APIView): json_data["paths"].update(json_data_temp["paths"]) json_data["definitions"].update(json_data_temp["definitions"]) - json_file = os.path.join(os.path.dirname(__file__), 'vfc.sfclcm.swagger.json') f = open(json_file) json_data_temp = json.JSONDecoder().decode(f.read()) @@ -58,18 +53,13 @@ class SwaggerJsonView(APIView): json_data["paths"].update(json_data_temp["paths"]) json_data["definitions"].update(json_data_temp["definitions"]) - json_file = os.path.join(os.path.dirname(__file__), 'vfc.others.swagger.json') f = open(json_file) json_data_temp = json.JSONDecoder().decode(f.read()) f.close() - json_data_jobtemp=json_data["paths"]["/jobs/{jobId}"] + json_data_jobtemp = json_data["paths"]["/jobs/{jobId}"] json_data["paths"].update(json_data_temp["paths"]) json_data["paths"]["/jobs/{jobId}"].update(json_data_jobtemp) json_data["definitions"].update(json_data_temp["definitions"]) - return Response(json_data) - - - diff --git a/lcm/workflows/auto_deploy.py b/lcm/workflows/auto_deploy.py index 8813542b..4f19d804 100644 --- a/lcm/workflows/auto_deploy.py +++ b/lcm/workflows/auto_deploy.py @@ -19,6 +19,7 @@ from lcm.pub.msapi import activiti logger = logging.getLogger(__name__) + def deploy_workflow_on_startup(): try: if WFPlanModel.objects.filter(): @@ -35,4 +36,3 @@ def deploy_workflow_on_startup(): logger.info("Deploy workflow successfully.") except: logger.error(traceback.format_exc()) - diff --git a/lcm/workflows/build_in.py b/lcm/workflows/build_in.py index 426e0c0a..cbc21858 100644 --- a/lcm/workflows/build_in.py +++ b/lcm/workflows/build_in.py @@ -43,6 +43,8 @@ format of input_data "sdnControllerId": uuid of SDN controller } """ + + def run_ns_instantiate(input_data): logger.debug("Enter %s, input_data is %s", fun_name(), input_data) ns_instantiate_ok = False @@ -116,6 +118,7 @@ def create_vl(ns_inst_id, vl_index, nsd, ns_param): logger.debug("Create VL(%s) successfully.", vl_id) + def create_vnf(ns_inst_id, vnf_index, nf_param): uri = "api/nslcm/v1/ns/vnfs" data = json.JSONEncoder().encode({ @@ -135,6 +138,7 @@ def create_vnf(ns_inst_id, vnf_index, nf_param): logger.debug("Create VNF(%s) started.", vnf_inst_id) return vnf_inst_id, job_id, vnf_index - 1 + def create_sfc(ns_inst_id, fp_index, nsd_json, sdnc_id): uri = "api/nslcm/v1/ns/sfcs" data = json.JSONEncoder().encode({ @@ -155,6 +159,7 @@ def create_sfc(ns_inst_id, fp_index, nsd_json, sdnc_id): logger.debug("Create SFC(%s) started.", sfc_inst_id) return sfc_inst_id, job_id, fp_index - 1 + def post_deal(ns_inst_id, status): uri = "api/nslcm/v1/ns/{nsInstanceId}/postdeal".format(nsInstanceId=ns_inst_id) data = json.JSONEncoder().encode({ @@ -166,6 +171,7 @@ def post_deal(ns_inst_id, status): logger.error("Failed to call post_deal(%s): %s", ns_inst_id, ret[1]) logger.debug("Call post_deal(%s, %s) successfully.", ns_inst_id, 
status) + def update_job(job_id, progress, errcode, desc): uri = "api/nslcm/v1/jobs/{jobId}".format(jobId=job_id) data = json.JSONEncoder().encode({ @@ -175,11 +181,11 @@ def update_job(job_id, progress, errcode, desc): }) restcall.req_by_msb(uri, "POST", data) + class JobWaitThread(Thread): """ Job Wait """ - def __init__(self, inst_id, job_id, ns_job_id, index): Thread.__init__(self) self.inst_id = inst_id @@ -226,6 +232,7 @@ class JobWaitThread(Thread): if job_end_normal: g_jobs_status[self.ns_job_id][self.index] = 0 + def wait_until_jobs_done(g_job_id, jobs): job_threads = [] for inst_id, job_id, index in jobs: @@ -239,6 +246,7 @@ def wait_until_jobs_done(g_job_id, jobs): logger.error("g_jobs_status[%s]: %s", g_job_id, g_jobs_status[g_job_id]) raise NSLCMException("Some jobs failed!") + def confirm_vnf_status(vnf_inst_id): uri = "api/nslcm/v1/ns/vnfs/{vnfInstId}".format(vnfInstId=vnf_inst_id) ret = restcall.req_by_msb(uri, "GET") @@ -250,6 +258,7 @@ def confirm_vnf_status(vnf_inst_id): if vnf_status != "active": raise NSLCMException("Status of VNF(%s) is not active" % vnf_inst_id) + def confirm_sfc_status(sfc_inst_id): uri = "api/nslcm/v1/ns/sfcs/{sfcInstId}".format(sfcInstId=sfc_inst_id) ret = restcall.req_by_msb(uri, "GET") @@ -260,10 +269,3 @@ def confirm_sfc_status(sfc_inst_id): sfc_status = ret[1]["sfcStatus"] if sfc_status != "active": raise NSLCMException("Status of SFC(%s) is not active" % sfc_inst_id) - - - - - - - diff --git a/lcm/workflows/tests.py b/lcm/workflows/tests.py index f6597c5c..c74f44d1 100644 --- a/lcm/workflows/tests.py +++ b/lcm/workflows/tests.py @@ -12,18 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest import json -import mock import os +import unittest + +import mock from django.test import Client from rest_framework import status +from lcm.pub.database.models import WFPlanModel from lcm.pub.utils import restcall -from lcm.pub.database.models import WFPlanModel, JobStatusModel -from lcm.pub.utils.jobutil import JobUtil from lcm.workflows import build_in + class WorkflowViewTest(unittest.TestCase): def setUp(self): self.client = Client() @@ -34,12 +35,13 @@ class WorkflowViewTest(unittest.TestCase): @mock.patch.object(restcall, 'upload_by_msb') def test_deploy_workflow(self, mock_upload_by_msb): - mock_upload_by_msb.return_value = [0, json.JSONEncoder().encode({ + res_data = { "status": "1", "message": "2", "deployedId": "3", "processId": "4" - }), '202'] + } + mock_upload_by_msb.return_value = [0, json.JSONEncoder().encode(res_data), '202'] response = self.client.post("/api/nslcm/v1/workflow", {"filePath": os.path.abspath(__file__)}, format='json') self.assertEqual(status.HTTP_202_ACCEPTED, response.status_code, response.content) @@ -51,13 +53,13 @@ class WorkflowViewTest(unittest.TestCase): mock_call_req.return_value = [0, json.JSONEncoder().encode({ "status": "1", "message": "2" - }), '202'] + }), '202'] mock_upload_by_msb.return_value = [0, json.JSONEncoder().encode({ "status": "2", "message": "3", "deployedId": "4", "processId": "5" - }), '202'] + }), '202'] WFPlanModel(deployed_id="1", process_id="2", status="3", message="4").save() response = self.client.post("/api/nslcm/v1/workflow", {"filePath": os.path.abspath(__file__), "forceDeploy": "True"}, format='json') @@ -93,37 +95,37 @@ class WorkflowViewTest(unittest.TestCase): "result": "0", "detail": "vl1", "vlId": "1" - }), '201'], + }), '201'], "api/nslcm/v1/ns/vnfs": [0, json.JSONEncoder().encode({ "vnfInstId": "2", 
"jobId": "11" - }), '201'], + }), '201'], "api/nslcm/v1/ns/vnfs/2": [0, json.JSONEncoder().encode({ "vnfStatus": "active" - }), '201'], + }), '201'], "api/nslcm/v1/ns/sfcs": [0, json.JSONEncoder().encode({ "sfcInstId": "3", "jobId": "111" - }), '201'], + }), '201'], "api/nslcm/v1/ns/sfcs/3": [0, json.JSONEncoder().encode({ "sfcStatus": "active" - }), '201'], + }), '201'], "/api/nslcm/v1/jobs/11?responseId=0": [0, json.JSONEncoder().encode({"responseDescriptor": { "responseId": "1", "progress": 100, "statusDescription": "ok" - }}), '200'], + }}), '200'], "/api/nslcm/v1/jobs/111?responseId=0": [0, json.JSONEncoder().encode({"responseDescriptor": { "responseId": "1", "progress": 100, "statusDescription": "ok" - }}), '200'], + }}), '200'], "api/nslcm/v1/jobs/{jobId}".format(jobId=job_id): [0, '{}', '201'], "api/nslcm/v1/ns/{nsInstanceId}/postdeal".format(nsInstanceId=ns_inst_id): @@ -135,15 +137,3 @@ class WorkflowViewTest(unittest.TestCase): mock_call_req.side_effect = side_effect self.assertTrue(build_in.run_ns_instantiate(wf_input)) - - - - - - - - - - - - diff --git a/lcm/workflows/views.py b/lcm/workflows/views.py index 41779e87..8e48acb2 100644 --- a/lcm/workflows/views.py +++ b/lcm/workflows/views.py @@ -57,8 +57,3 @@ def deploy_workflow(request, *args, **kwargs): return Response(data={'error': str(sys.exc_info())}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) logger.debug("Leave %s", fun_name()) return Response(data={'msg': 'OK'}, status=status.HTTP_202_ACCEPTED) - - - - -