Return flavor-id in OOF response
[optf/has.git] / conductor / conductor / solver / service.py
#
# -------------------------------------------------------------------------
#   Copyright (c) 2015-2017 AT&T Intellectual Property
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
# -------------------------------------------------------------------------
#

import collections
import json
import socket
import time
import traceback

import cotyledon
from oslo_config import cfg
from oslo_log import log

import conductor.common.prometheus_metrics as PC
from conductor.common.models import plan, region_placeholders, country_latency, group_rules, groups
from conductor.common.models import order_lock
from conductor.common.models import order_lock_history
from conductor.common.models import triage_tool
from conductor.common.models.order_lock import OrderLock
from conductor.common.models.triage_tool import TriageTool
from conductor.common.music import api
from conductor.common.music import messaging as music_messaging
from conductor.common.music.model import base
from conductor.common.utils import conductor_logging_util as log_util
from conductor.i18n import _LE, _LI
from conductor import messaging
from conductor import service
from conductor.solver.optimizer import optimizer
from conductor.solver.request import parser
from conductor.solver.utils import constraint_engine_interface as cei

# To use oslo.log in services:
#
# 0. Note that conductor.service.prepare_service() bootstraps this.
#    It's set up within conductor.cmd.SERVICENAME.
# 1. Add "from oslo_log import log"
# 2. Also add "LOG = log.getLogger(__name__)"
# 3. For i18n support, import appropriate shortcuts as well:
#    "from i18n import _, _LC, _LE, _LI, _LW  # noqa"
#    (that's for primary, critical, error, info, warning)
# 4. Use LOG.info, LOG.warning, LOG.error, LOG.critical, LOG.debug, e.g.:
#    "LOG.info(_LI("Something happened with {}").format(thingie))"
# 5. Do NOT put translation wrappers around any LOG.debug text.
# 6. Be liberal with logging, especially in the absence of unit tests!
# 7. Calls to print() are verboten within the service proper.
#    Logging can be redirected! (In a CLI-side script, print() is fine.)
#
# Usage: http://docs.openstack.org/developer/oslo.i18n/usage.html

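# A minimal sketch pulling the steps above together ("thingie" is a
# hypothetical variable, used purely for illustration):
#
#     LOG = log.getLogger(__name__)
#     LOG.info(_LI("Something happened with {}").format(thingie))
#     LOG.debug("Raw value: {}".format(thingie))  # no translation wrapper
#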
LOG = log.getLogger(__name__)

# To use oslo.config in services:
#
# 0. Note that conductor.service.prepare_service() bootstraps this.
#    It's set up within conductor.cmd.SERVICENAME.
# 1. Add "from oslo_config import cfg"
# 2. Also add "CONF = cfg.CONF"
# 3. Set a list of locally used options (SOLVER_OPTS is fine).
#    Choose key names thoughtfully. Be technology-agnostic, avoid TLAs, etc.
# 4. Register, e.g. "CONF.register_opts(SOLVER_OPTS, group='solver')"
# 5. Add file reference to opts.py (may need to use itertools.chain())
# 6. Run tox -e genconfig to build a new config template.
# 7. If you want to load an entire config from a CLI you can do this:
#    "conf = service.prepare_service([], config_files=[CONFIG_FILE])"
# 8. You can even use oslo_config from a CLI and override values on the fly,
#    e.g., "CONF.set_override('hostnames', ['music2'], 'music_api')"
#    (leave the third arg out to use the DEFAULT group).
# 9. Loading a config from a CLI is optional. So long as all the options
#    have defaults (or you override them as needed), it should all work.
#
# Docs: http://docs.openstack.org/developer/oslo.config/

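# A minimal sketch of the register/read/override cycle described above,
# mirroring what this module does below (the value 4 is hypothetical):
#
#     CONF = cfg.CONF
#     CONF.register_opts(SOLVER_OPTS, group='solver')
#     workers = CONF.solver.workers            # 1 unless overridden
#     CONF.set_override('workers', 4, 'solver')
#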
CONF = cfg.CONF

SOLVER_OPTS = [
    cfg.IntOpt('workers',
               default=1,
               min=1,
               help='Number of workers for solver service. '
                    'Default value is 1.'),
    cfg.IntOpt('solver_timeout',
               default=480,
               min=1,
               help='The timeout value for solver service. '
                    'Default value is 480 seconds.'),
    cfg.BoolOpt('concurrent',
                default=False,
                help='Set to True when solver will run in active-active '
                     'mode. When set to False, solver will restart any '
                     'orphaned solving requests at startup.'),
    cfg.IntOpt('timeout',
               default=600,
               min=1,
               help='Timeout for detecting that a VM is down, so that '
                    'other VMs can pick the plan up. This value should be '
                    'larger than solver_timeout. '
                    'Default value is 600 seconds (10 minutes).'),
    cfg.IntOpt('max_solver_counter',
               default=1,
               min=1,
               help='Maximum number of times the solver will attempt a '
                    'plan before marking it as an error. '
                    'Default value is 1.')
]

CONF.register_opts(SOLVER_OPTS, group='solver')

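# A sketch of how these options would appear in the [solver] group of
# a conductor configuration file (values shown are the defaults above):
#
#     [solver]
#     workers = 1
#     solver_timeout = 480
#     concurrent = false
#     timeout = 600
#     max_solver_counter = 1
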
# Pull in service opts. We use them here.
OPTS = service.OPTS
CONF.register_opts(OPTS)


class SolverServiceLauncher(object):
    """Launcher for the solver service."""

    def __init__(self, conf):
        self.conf = conf

        # Set up Music access.
        self.music = api.API()
        self.music.keyspace_create(keyspace=conf.keyspace)

        # Dynamically create the model classes for the specified keyspace.
        self.Plan = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=plan.Plan, classname="Plan")
        self.OrderLock = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=order_lock.OrderLock, classname="OrderLock")
        self.OrderLockHistory = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=order_lock_history.OrderLockHistory, classname="OrderLockHistory")
        self.RegionPlaceholders = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=region_placeholders.RegionPlaceholders, classname="RegionPlaceholders")
        self.CountryLatency = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=country_latency.CountryLatency, classname="CountryLatency")
        self.TriageTool = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=triage_tool.TriageTool, classname="TriageTool")
        # self.Groups = base.create_dynamic_model(
        #     keyspace=conf.keyspace, baseclass=groups.Groups, classname="Groups")
        # self.GroupRules = base.create_dynamic_model(
        #     keyspace=conf.keyspace, baseclass=group_rules.GroupRules, classname="GroupRules")

        # Initialize the Prometheus metrics endpoint.
        # The solver service uses index 1.
        PC._init_metrics(1)

        # A bare `raise` is invalid outside an except block, so raise an
        # explicit error if any dynamic model could not be created.
        if not self.Plan:
            raise RuntimeError("Unable to create the Plan model")
        if not self.OrderLock:
            raise RuntimeError("Unable to create the OrderLock model")
        if not self.OrderLockHistory:
            raise RuntimeError("Unable to create the OrderLockHistory model")
        if not self.RegionPlaceholders:
            raise RuntimeError("Unable to create the RegionPlaceholders model")
        if not self.CountryLatency:
            raise RuntimeError("Unable to create the CountryLatency model")
        if not self.TriageTool:
            raise RuntimeError("Unable to create the TriageTool model")
        # if not self.Groups:
        #     raise RuntimeError("Unable to create the Groups model")
        # if not self.GroupRules:
        #     raise RuntimeError("Unable to create the GroupRules model")

    def run(self):
        kwargs = {'plan_class': self.Plan,
                  'order_locks': self.OrderLock,
                  'order_locks_history': self.OrderLockHistory,
                  'region_placeholders': self.RegionPlaceholders,
                  'country_latency': self.CountryLatency,
                  'triage_tool': self.TriageTool
                  # 'groups': self.Groups,
                  # 'group_rules': self.GroupRules
                  }
        svcmgr = cotyledon.ServiceManager()
        svcmgr.add(SolverService,
                   workers=self.conf.solver.workers,
                   args=(self.conf,), kwargs=kwargs)
        svcmgr.run()


class SolverService(cotyledon.Service):
    """Solver service."""

    # This will appear in 'ps xaf'
    name = "Conductor Solver"

    regions = collections.OrderedDict()
    countries = list()

    def __init__(self, worker_id, conf, **kwargs):
        """Initializer"""

        LOG.debug("%s" % self.__class__.__name__)
        super(SolverService, self).__init__(worker_id)
        self._init(conf, **kwargs)
        self.running = True

    def _init(self, conf, **kwargs):
        """Set up the necessary ingredients."""
        self.conf = conf
        self.kwargs = kwargs

        self.Plan = kwargs.get('plan_class')
        self.OrderLock = kwargs.get('order_locks')
        self.OrderLockHistory = kwargs.get('order_locks_history')
        self.RegionPlaceholders = kwargs.get('region_placeholders')
        self.CountryLatency = kwargs.get('country_latency')
        self.TriageTool = kwargs.get('triage_tool')
        # self.Groups = kwargs.get('groups')
        # self.GroupRules = kwargs.get('group_rules')

        # Set up the RPC service(s) we want to talk to.
        self.data_service = self.setup_rpc(conf, "data")

        # Set up the constraint engine interface and the optimizer.
        self.cei = cei.ConstraintEngineInterface(self.data_service)
        # self.optimizer = optimizer.Optimizer(conf)

        # Set up Music access.
        self.music = api.API()
        self.solver_owner_condition = {
            "solver_owner": socket.gethostname()
        }
        self.translated_status_condition = {
            "status": self.Plan.TRANSLATED
        }
        self.solving_status_condition = {
            "status": self.Plan.SOLVING
        }

        if not self.conf.solver.concurrent:
            self._reset_solving_status()

    def _gracefully_stop(self):
        """Gracefully stop working on things"""
        pass

    def current_time_seconds(self):
        """Current time in seconds."""
        return int(round(time.time()))

    def _reset_solving_status(self):
        """Reset plans being solved so they are solved again.

        Use this only when the solver service is not running concurrently.
        """

        plans = self.Plan.query.get_plan_by_col("status", self.Plan.SOLVING)
        for the_plan in plans:
            the_plan.status = self.Plan.TRANSLATED
            # Used only in active-passive mode, so the update
            # doesn't have to be atomic.
            the_plan.update()

    def _restart(self):
        """Prepare to restart the service"""
        pass

    def millisec_to_sec(self, millisec):
        """Convert milliseconds to seconds"""
        return millisec / 1000

    def setup_rpc(self, conf, topic):
        """Set up the RPC Client"""
        # TODO(jdandrea): Put this pattern inside music_messaging?
        transport = messaging.get_transport(conf=conf)
        target = music_messaging.Target(topic=topic)
        client = music_messaging.RPCClient(conf=conf,
                                           transport=transport,
                                           target=target)
        return client

    def run(self):
        """Run"""
        LOG.debug("%s" % self.__class__.__name__)
        # TODO(snarayanan): This is really meant to be a control loop.
        # As long as self.running is true, we process another request.

        while self.running:

            # Delay time (seconds) for MUSIC requests.
            time.sleep(self.conf.delay_time)

            # plans = Plan.query().all()
            # Find the first plan with a status of TRANSLATED.
            # Change its status to SOLVING.
            # Then, read the "translated" field as "template".
            json_template = None
            p = None

            requests_to_solve = dict()
            regions_maps = dict()
            country_groups = list()

            # Instead of using the query.all() method, query plans by the
            # 'status' column, for which an index was created in the
            # conductor.plans table.
            translated_plans = self.Plan.query.get_plan_by_col("status", self.Plan.TRANSLATED)
            solving_plans = self.Plan.query.get_plan_by_col("status", self.Plan.SOLVING)

            # Combine the plans with status 'translated' and 'solving'.
            plans = translated_plans + solving_plans

            found_translated_template = False

            for p in plans:
                if p.status == self.Plan.TRANSLATED:
                    json_template = p.translation
                    found_translated_template = True
                    break
                elif p.status == self.Plan.SOLVING and \
                        (self.current_time_seconds() - self.millisec_to_sec(p.updated)) > self.conf.solver.timeout:
                    p.status = self.Plan.TRANSLATED
                    p.update(condition=self.solving_status_condition)
                    break

            if not json_template:
                if found_translated_template:
                    message = _LE("Plan {} status is translated, yet "
                                  "the translation wasn't found").format(p.id)
                    LOG.error(message)
                    p.status = self.Plan.ERROR
                    p.message = message
                    p.update(condition=self.translated_status_condition)
                continue

            if found_translated_template and p and p.solver_counter >= self.conf.solver.max_solver_counter:
                message = _LE("Plan {} could not be solved after {} attempts")\
                    .format(p.id, self.conf.solver.max_solver_counter)
                LOG.error(message)
                p.status = self.Plan.ERROR
                p.message = message
                p.update(condition=self.translated_status_condition)
                continue

            log_util.setLoggerFilter(LOG, self.conf.keyspace, p.id)

            p.status = self.Plan.SOLVING
            p.solver_counter += 1
            p.solver_owner = socket.gethostname()

            _is_updated = p.update(condition=self.translated_status_condition)
            if not _is_updated:
                continue

            # Another VM has already updated the status and started
            # solving this plan.
            if 'FAILURE' in _is_updated:
                continue

            LOG.info(_LI("Solving starts, changing the template status from translated to solving, "
                         "atomic update response from MUSIC {}").format(_is_updated))

            LOG.info(_LI("Plan {} with request id {} is being solved by machine {}, attempt number {}.").
                     format(p.id, p.name, p.solver_owner, p.solver_counter))

            _is_success = "FAILURE"
            request = parser.Parser()
            request.cei = self.cei
            request.request_id = p.name
            request.plan_id = p.id
            # Get the number of solutions to provide.
            num_solution = getattr(p, 'recommend_max', '1')
            if num_solution.isdigit():
                num_solution = int(num_solution)

            # TODO(inam/larry): move this part of the logic inside the
            # parser and don't apply it to distance_between.
            try:
                # Get the region placeholders from the database and insert
                # them into the regions_maps dictionary.
                region_placeholders = self.RegionPlaceholders.query.all()
                for region in region_placeholders:
                    regions_maps.update(region.countries)

                # Get the country groups from the database and insert them
                # into the country_groups list.
                customer_loc = ''
                location_list = json_template["conductor_solver"]["locations"]
                for location_id, location_info in location_list.items():
                    customer_loc = location_info['country']

                countries = self.CountryLatency.query.get_plan_by_col("country_name", customer_loc)
                LOG.info("Customer location for latency reduction: " + customer_loc)

                if len(countries) == 0:
                    LOG.info("Country is not present in the country latency table, "
                             "looking for a '*' wildcard entry")
                    countries = self.CountryLatency.query.get_plan_by_col("country_name", "*")
                    if len(countries) != 0:
                        LOG.info("Found '*' wildcard entry in the country latency table")
                    else:
                        msg = "No '*' wildcard entry found in the country latency table. No solution will be provided"
                        LOG.info(msg)
                        p.message = msg

                for country in countries:
                    country_groups = country.groups

                LOG.info("Done getting latency country DB groups")
            except Exception as error_msg:
                LOG.error("Exception thrown while reading region_placeholders and country groups information "
                          "from database. Exception message: {}".format(error_msg))

            try:
                request.parse_template(json_template, country_groups, regions_maps)
                # Note: the method name is spelled this way in the parser module.
                request.assgin_constraints_to_demands()
                requests_to_solve[p.id] = request
                opt = optimizer.Optimizer(self.conf, _requests=requests_to_solve)
                solution_list = opt.get_solution(num_solution)

            except Exception as err:
                message = _LE("Plan {} status encountered a "
                              "parsing error: {}").format(p.id, err.message)
                LOG.error(traceback.format_exc())
                p.status = self.Plan.ERROR
                p.message = message
                while 'FAILURE' in _is_success:
                    _is_success = p.update(condition=self.solver_owner_condition)
                    LOG.info(_LI("Encountered a parsing error, changing the template status from solving to error, "
                                 "atomic update response from MUSIC {}").format(_is_success))

                continue

434             LOG.info("Preparing the recommendations ")
435             # checking if the order is 'initial' or 'speed changed' one
436             is_speed_change = False
437             if request and request.request_type == 'speed changed':
438                 is_speed_change = True
439
            recommendations = []
            if not solution_list or len(solution_list) < 1:
                # The order took too long to solve.
                if (int(round(time.time())) - self.millisec_to_sec(p.updated)) > self.conf.solver.solver_timeout:
                    message = _LI("Plan {} timed out, exceeding the expected "
                                  "time of {} seconds").format(p.id, self.conf.solver.solver_timeout)

                # No solution was found.
                else:
                    message = _LI("Plan {} search failed, no "
                                  "recommendations found by machine {}").format(p.id, p.solver_owner)
                LOG.info(message)
                # Update the plan status
                p.status = self.Plan.NOT_FOUND
                p.message = message

                # Metrics to Prometheus
                m_svc_name = p.template['parameters'].get('service_name', 'N/A')
                PC.VNF_FAILURE.labels('ONAP', m_svc_name).inc()

                while 'FAILURE' in _is_success:
                    _is_success = p.update(condition=self.solver_owner_condition)
                    LOG.info(_LI("Plan search failed, changing the template status from solving to not found, "
                                 "atomic update response from MUSIC {}").format(_is_success))
            else:
                # Assemble the recommendation result JSON.
                for solution in solution_list:
                    current_rec = dict()
                    for demand_name in solution:
                        resource = solution[demand_name]

                        if not is_speed_change:
                            is_rehome = "false"
                        else:
                            is_rehome = "false" if resource.get("existing_placement") == 'true' else "true"

                        location_id = "" if resource.get("cloud_region_version") == '2.5' else resource.get("location_id")

                        rec = {
                            # FIXME(shankar) A&AI must not be hardcoded here.
                            # Also, account for more than one Inventory Provider.
                            "inventory_provider": "aai",
                            "service_resource_id":
                                resource.get("service_resource_id"),
                            "candidate": {
                                "candidate_id": resource.get("candidate_id"),
                                "inventory_type": resource.get("inventory_type"),
                                "cloud_owner": resource.get("cloud_owner"),
                                "location_type": resource.get("location_type"),
                                "location_id": location_id,
                                "is_rehome": is_rehome,
                            },
                            "attributes": {
                                "physical-location-id":
                                    resource.get("physical_location_id"),
                                "cloud_owner": resource.get("cloud_owner"),
                                'aic_version': resource.get("cloud_region_version")},
                        }

                        if resource.get('vim-id'):
                            rec["candidate"]['vim-id'] = resource.get('vim-id')

                        if rec["candidate"]["inventory_type"] == "service":
                            rec["attributes"]["host_id"] = resource.get("host_id")
                            rec["attributes"]["service_instance_id"] = resource.get("candidate_id")
                            rec["candidate"]["host_id"] = resource.get("host_id")

                            if resource.get('vlan_key'):
                                rec["attributes"]['vlan_key'] = resource.get('vlan_key')
                            if resource.get('port_key'):
                                rec["attributes"]['port_key'] = resource.get('port_key')

                        elif rec["candidate"]["inventory_type"] == "cloud":
                            if resource.get("all_directives") and resource.get("flavor_map"):
                                rec["attributes"]["directives"] = \
                                    self.set_flavor_in_flavor_directives(
                                        resource.get("flavor_map"), resource.get("all_directives"))

                                # Metrics to Prometheus
                                m_vim_id = resource.get("vim-id")
                                m_hpa_score = resource.get("hpa_score", 0)
                                m_svc_name = p.template['parameters'].get(
                                    'service_name', 'N/A')
                                for vnfc, flavor in resource.get("flavor_map").iteritems():
                                    PC.VNF_COMPUTE_PROFILES.labels('ONAP',
                                                                   m_svc_name,
                                                                   demand_name,
                                                                   vnfc,
                                                                   flavor,
                                                                   m_vim_id).inc()

                                PC.VNF_SCORE.labels('ONAP', m_svc_name,
                                                    demand_name,
                                                    m_hpa_score).inc()

                            if resource.get('conflict_id'):
                                rec["candidate"]["conflict_id"] = resource.get("conflict_id")

                        # TODO(snarayanan): Add total value to recommendations?
                        # msg = "--- total value of decision = {}"
                        # LOG.debug(msg.format(_best_path.total_value))
                        # msg = "--- total cost of decision = {}"
                        # LOG.debug(msg.format(_best_path.total_cost))
                        current_rec[demand_name] = rec

                    recommendations.append(current_rec)

                # Update the plan with the solution.
                p.solution = {
                    "recommendations": recommendations
                }

                # Multiple spin-ups logic:
                #
                # Go through the list of recommendations in the solution.
                # For cloud candidates, check whether the key
                # (cloud-region-id + e2evnfkey) is in the order_locks table.
                # If so, insert a row with status 'parked' in order_locks and
                # change the plan status to 'pending' in the plans table (or
                # another status value). Otherwise, insert a row with status
                # 'locked' in order_locks, change the plan status to 'solved'
                # in the plans table, and continue with the reservation.

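                # A minimal sketch of the order_locks row written below
                # (conflict_id and service_resource_id come from the cloud
                # candidate; the status constant is defined on OrderLock):
                #
                #     plan = {p.id: {"status": OrderLock.UNDER_SPIN_UP,
                #                    "created": self.current_time_millis(),
                #                    "updated": self.current_time_millis(),
                #                    "service_resource_id": service_resource_id}}
                #     self.OrderLock(id=conflict_id, plans=plan).insert()
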
                # Clean up the records in the order_locks table, deleting
                # all records that failed from MSO.
                order_locks = self.OrderLock.query.all()
                for order_lock_record in order_locks:

                    plans = getattr(order_lock_record, 'plans')
                    for plan_id, plan_attributes in plans.items():
                        plan_dict = json.loads(plan_attributes)

                        if plan_dict.get('status', None) == OrderLock.FAILED:
                            order_lock_record.delete()
                            LOG.info(_LI("The order lock record {} with status {} was deleted (due to a failed spin-up in MSO) from the order_locks table").
                                     format(order_lock_record, plan_dict.get('status')))
                            break

                inserted_order_records_dict = dict()
                available_dependencies_set = set()

                is_inserted_to_order_locks = True
                is_conflict_id_missing = False
                is_order_translated_before_spinup = False

                for solution in solution_list:

                    for demand_name, candidate in solution.items():
                        if candidate.get('inventory_type') == 'cloud':
                            conflict_id = candidate.get('conflict_id')
                            service_resource_id = candidate.get('service_resource_id')
                            # TODO(larry): add more logic for missing conflict_id in template
                            if not conflict_id:
                                is_conflict_id_missing = True
                                break

                            available_dependencies_set.add(conflict_id)
                            # Check whether the conflict_id exists in the order_locks table.
                            order_lock_record = self.OrderLock.query.get_plan_by_col("id", conflict_id)
                            if order_lock_record:
                                is_spinup_completed = getattr(order_lock_record[0], 'is_spinup_completed')
                                spinup_completed_timestamp = getattr(order_lock_record[0], 'spinup_completed_timestamp')
                                if is_spinup_completed and spinup_completed_timestamp > p.translation_begin_timestamp:
                                    is_order_translated_before_spinup = True
                                    break
                                elif not is_spinup_completed:
                                    inserted_order_records_dict[conflict_id] = service_resource_id

                if is_conflict_id_missing:
                    message = _LE("Missing conflict identifier field for cloud candidates in the template, "
                                  "could not insert into the order_locks table")
                    LOG.debug(message)
                    p.status = self.Plan.SOLVED

                elif is_order_translated_before_spinup:
                    message = _LE("Retriggering Plan {} because a new order arrived before the "
                                  "spin-up completion of the old order").format(p.id)
                    LOG.debug(message)
                    p.rehome_plan()

                elif len(inserted_order_records_dict) > 0:

                    new_dependencies_set = available_dependencies_set - set(inserted_order_records_dict.keys())
                    dependencies = ','.join(str(s) for s in new_dependencies_set)

                    for conflict_id, service_resource_id in inserted_order_records_dict.items():
                        plan = {
                            p.id: {
                                "status": OrderLock.UNDER_SPIN_UP,
                                "created": self.current_time_millis(),
                                "updated": self.current_time_millis(),
                                "service_resource_id": service_resource_id
                            }
                        }

                        if dependencies:
                            plan[p.id]['dependencies'] = dependencies

                        order_lock_row = self.OrderLock(id=conflict_id, plans=plan)
                        response = order_lock_row.insert()

                        # TODO(larry): add more logs for inserting order lock record (insert/update)
                        LOG.info(_LI("Inserting the order lock record into the order_locks table in MUSIC, "
                                     "conditional insert operation response from MUSIC {}").format(response))
                        if response and response.status_code == 200:
                            body = response.json()
                            LOG.info("Successfully inserted the record into the order_locks table with "
                                     "the following response message {}".format(body))
                        else:
                            is_inserted_to_order_locks = False
                else:
                    for solution in solution_list:
                        for demand_name, candidate in solution.items():
                            if candidate.get('inventory_type') == 'cloud':
                                conflict_id = candidate.get('conflict_id')
                                service_resource_id = candidate.get('service_resource_id')

                                order_lock_record = self.OrderLock.query.get_plan_by_col("id", conflict_id)
                                if order_lock_record:
                                    deleting_record = order_lock_record[0]
                                    plans = getattr(deleting_record, 'plans')
                                    is_spinup_completed = getattr(deleting_record, 'is_spinup_completed')
                                    spinup_completed_timestamp = getattr(deleting_record, 'spinup_completed_timestamp')

                                    if is_spinup_completed:
                                        # Persist the record in the order_locks_history table.
                                        order_lock_history_record = self.OrderLockHistory(conflict_id=conflict_id, plans=plans,
                                                                                          is_spinup_completed=is_spinup_completed,
                                                                                          spinup_completed_timestamp=spinup_completed_timestamp)
                                        LOG.debug("Inserting the history record with conflict id {} into the order_locks_history table".format(conflict_id))
                                        order_lock_history_record.insert()
                                        # Remove the older record.
                                        LOG.debug("Deleting the order lock record {} from the order_locks table".format(deleting_record))
                                        deleting_record.delete()

                                plan = {
                                    p.id: {
                                        "status": OrderLock.UNDER_SPIN_UP,
                                        "created": self.current_time_millis(),
                                        "updated": self.current_time_millis(),
                                        "service_resource_id": service_resource_id
                                    }
                                }
                                order_lock_row = self.OrderLock(id=conflict_id, plans=plan)
                                response = order_lock_row.insert()
                                # TODO(larry): add more logs for inserting order lock record (insert/update)
                                LOG.info(_LI("Inserting the order lock record into the order_locks table in MUSIC, "
                                             "conditional insert operation response from MUSIC {}").format(response))
                                if response and response.status_code == 200:
                                    body = response.json()
                                    LOG.info("Successfully inserted the record into the order_locks table "
                                             "with the following response message {}".format(body))
                                else:
                                    is_inserted_to_order_locks = False

                if not is_inserted_to_order_locks:
                    message = _LE("Plan {} status encountered an "
                                  "error while inserting the order lock message into MUSIC.").format(p.id)
                    LOG.error(message)
                    p.status = self.Plan.ERROR
                    p.message = message

                elif p.status == self.Plan.SOLVING:
                    if len(inserted_order_records_dict) > 0:
                        LOG.info(_LI("The plan with id {} is parked in the order_locks table, waiting for MSO release calls").
                                 format(p.id))
                        p.status = self.Plan.WAITING_SPINUP
                    else:
                        LOG.info(_LI("The plan with id {} is inserted in the order_locks table.").
                                 format(p.id))
                        p.status = self.Plan.SOLVED

            while 'FAILURE' in _is_success and (self.current_time_seconds() - self.millisec_to_sec(p.updated)) <= self.conf.solver.timeout:
                _is_success = p.update(condition=self.solver_owner_condition)
                LOG.info(_LI("Plan search complete, changing the template status from solving to {}, "
                             "atomic update response from MUSIC {}").format(p.status, _is_success))

            LOG.info(_LI("Plan {} search complete, {} solution(s) found by machine {}").
                     format(p.id, len(solution_list), p.solver_owner))
            LOG.debug("Plan {} detailed solution: {}".
                      format(p.id, p.solution))
            LOG.info("Plan name: {}".format(p.name))

    def terminate(self):
        """Terminate"""
        LOG.debug("%s" % self.__class__.__name__)
        self.running = False
        self._gracefully_stop()
        super(SolverService, self).terminate()

    def reload(self):
        """Reload"""
        LOG.debug("%s" % self.__class__.__name__)
        self._restart()

    def current_time_millis(self):
        """Current time in milliseconds."""
        return int(round(time.time() * 1000))

    def set_flavor_in_flavor_directives(self, flavor_map, directives):
        """Insert the flavor names from flavor_map into the flavor_directives.

        :param flavor_map: mapping of flavor attribute names to flavor names
        :param directives: all of the directives from the request
        """
        keys = flavor_map.keys()
        for ele in directives.get("directives"):
            for item in ele.get("directives"):
                if "flavor_directives" in item.get("type"):
                    for attr in item.get("attributes"):
                        attr["attribute_value"] = flavor_map.get(attr["attribute_name"]) \
                            if attr.get("attribute_name") in keys else ""
        return directives
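
# A minimal before/after sketch of set_flavor_in_flavor_directives, with
# hypothetical data (the attribute name "flavor_name" and the flavor
# "m1.small" are illustrative only):
#
#     flavor_map = {"flavor_name": "m1.small"}
#     directives = {"directives": [
#         {"directives": [
#             {"type": "flavor_directives",
#              "attributes": [
#                  {"attribute_name": "flavor_name",
#                   "attribute_value": ""}]}]}]}
#
# After the call, the empty "attribute_value" is filled in with "m1.small";
# attribute names missing from flavor_map are set to "".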