Merge "Added all common modules in conductor directory"
author: Shankaranarayanan Puzhavakath Narayanan <snarayanan@research.att.com>
Sun, 17 Dec 2017 15:39:38 +0000 (15:39 +0000)
committer: Gerrit Code Review <gerrit@onap.org>
Sun, 17 Dec 2017 15:39:38 +0000 (15:39 +0000)
66 files changed:
conductor/conductor/api/__init__.py [new file with mode: 0644]
conductor/conductor/api/app.py [new file with mode: 0644]
conductor/conductor/api/app.wsgi [new file with mode: 0644]
conductor/conductor/api/controllers/__init__.py [new file with mode: 0644]
conductor/conductor/api/controllers/errors.py [new file with mode: 0644]
conductor/conductor/api/controllers/root.py [new file with mode: 0644]
conductor/conductor/api/controllers/v1/__init__.py [new file with mode: 0644]
conductor/conductor/api/controllers/v1/plans.py [new file with mode: 0644]
conductor/conductor/api/controllers/v1/root.py [new file with mode: 0644]
conductor/conductor/api/controllers/validator.py [new file with mode: 0644]
conductor/conductor/api/hooks.py [new file with mode: 0644]
conductor/conductor/api/middleware.py [new file with mode: 0644]
conductor/conductor/api/rbac.py [new file with mode: 0644]
conductor/conductor/controller/__init__.py [new file with mode: 0644]
conductor/conductor/controller/rpc.py [new file with mode: 0644]
conductor/conductor/controller/service.py [new file with mode: 0644]
conductor/conductor/controller/translator.py [new file with mode: 0644]
conductor/conductor/controller/translator_svc.py [new file with mode: 0644]
conductor/conductor/data/__init__.py [new file with mode: 0644]
conductor/conductor/data/plugins/__init__.py [new file with mode: 0644]
conductor/conductor/data/plugins/base.py [new file with mode: 0644]
conductor/conductor/data/plugins/inventory_provider/__init__.py [new file with mode: 0644]
conductor/conductor/data/plugins/inventory_provider/aai.py [new file with mode: 0644]
conductor/conductor/data/plugins/inventory_provider/base.py [new file with mode: 0644]
conductor/conductor/data/plugins/inventory_provider/extensions.py [new file with mode: 0644]
conductor/conductor/data/plugins/service_controller/__init__.py [new file with mode: 0644]
conductor/conductor/data/plugins/service_controller/base.py [new file with mode: 0644]
conductor/conductor/data/plugins/service_controller/extensions.py [new file with mode: 0644]
conductor/conductor/data/plugins/service_controller/sdnc.py [new file with mode: 0644]
conductor/conductor/data/service.py [new file with mode: 0644]
conductor/conductor/reservation/__init__.py [new file with mode: 0644]
conductor/conductor/reservation/service.py [new file with mode: 0644]
conductor/conductor/solver/__init__.py [new file with mode: 0644]
conductor/conductor/solver/optimizer/__init__.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/best_first.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/constraints/__init__.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/constraints/access_distance.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/constraints/attribute.py [new file with mode: 0644]
conductor/conductor/solver/optimizer/constraints/cloud_distance.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/constraints/constraint.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/constraints/inventory_group.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/constraints/service.py [new file with mode: 0644]
conductor/conductor/solver/optimizer/constraints/zone.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/decision_path.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/fit_first.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/greedy.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/optimizer.py [new file with mode: 0755]
conductor/conductor/solver/optimizer/random_pick.py [new file with mode: 0644]
conductor/conductor/solver/optimizer/search.py [new file with mode: 0755]
conductor/conductor/solver/request/__init__.py [new file with mode: 0755]
conductor/conductor/solver/request/demand.py [new file with mode: 0755]
conductor/conductor/solver/request/functions/__init__.py [new file with mode: 0755]
conductor/conductor/solver/request/functions/cloud_version.py [new file with mode: 0644]
conductor/conductor/solver/request/functions/distance_between.py [new file with mode: 0755]
conductor/conductor/solver/request/objective.py [new file with mode: 0755]
conductor/conductor/solver/request/parser.py [new file with mode: 0755]
conductor/conductor/solver/resource/__init__.py [new file with mode: 0755]
conductor/conductor/solver/resource/region.py [new file with mode: 0755]
conductor/conductor/solver/resource/service.py [new file with mode: 0755]
conductor/conductor/solver/service.py [new file with mode: 0644]
conductor/conductor/solver/simulators/__init__.py [new file with mode: 0644]
conductor/conductor/solver/simulators/a_and_ai/__init__.py [new file with mode: 0755]
conductor/conductor/solver/simulators/valet/__init__.py [new file with mode: 0755]
conductor/conductor/solver/utils/__init__.py [new file with mode: 0755]
conductor/conductor/solver/utils/constraint_engine_interface.py [new file with mode: 0644]
conductor/conductor/solver/utils/utils.py [new file with mode: 0755]

diff --git a/conductor/conductor/api/__init__.py b/conductor/conductor/api/__init__.py
new file mode 100644 (file)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/api/app.py b/conductor/conductor/api/app.py
new file mode 100644 (file)
index 0000000..70d54b5
--- /dev/null
@@ -0,0 +1,137 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import os
+import uuid
+
+from oslo_config import cfg
+from oslo_log import log
+from paste import deploy
+import pecan
+
+from conductor.api import hooks
+from conductor.api import middleware
+from conductor import service
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
+OPTS = [
+    cfg.StrOpt('api_paste_config',
+               default="api_paste.ini",
+               help="Configuration file for WSGI definition of API."
+               ),
+]
+
+API_OPTS = [
+    cfg.BoolOpt('pecan_debug',
+                default=False,
+                help='Toggle Pecan Debug Middleware.'),
+    cfg.IntOpt('default_api_return_limit',
+               min=1,
+               default=100,
+               help='Default maximum number of items returned by API request.'
+               ),
+]
+
+CONF.register_opts(OPTS)
+CONF.register_opts(API_OPTS, group='api')
+
+# Pull in service opts. We use them here.
+OPTS = service.OPTS
+CONF.register_opts(OPTS)
+
+# Can call like so to force a particular config:
+# conductor-api --port=8091 -- --config-file=my_config
+#
+# For api command-line options:
+# conductor-api -- --help
+
+
+def setup_app(pecan_config=None, conf=None):
+    if conf is None:
+        raise RuntimeError("No configuration passed")
+
+    app_hooks = [
+        hooks.ConfigHook(conf),
+        hooks.MessagingHook(conf),
+    ]
+
+    pecan_config = pecan_config or {
+        "app": {
+            'root': 'conductor.api.controllers.root.RootController',
+            'modules': ['conductor.api'],
+        }
+    }
+
+    pecan.configuration.set_config(dict(pecan_config), overwrite=True)
+
+    app = pecan.make_app(
+        pecan_config['app']['root'],
+        debug=conf.api.pecan_debug,
+        hooks=app_hooks,
+        wrap_app=middleware.ParsableErrorMiddleware,
+        guess_content_type_from_ext=False,
+        default_renderer='json',
+        force_canonical=False,
+    )
+
+    return app
+
+
+# pastedeploy uses ConfigParser to handle global_conf, since Python 3's
+# ConfigParser doesn't allow storing objects as config values. Only strings
+# are permitted. Thus, to be able to pass an object created before paste
+# loads the app, we store them in a global variable. Then each loaded app
+# stores its configuration using a unique key to be concurrency safe.
+global APPCONFIGS
+APPCONFIGS = {}
+
+
+def load_app(conf):
+    global APPCONFIGS
+
+    # Build the WSGI app
+    cfg_file = None
+    cfg_path = conf.api_paste_config
+    if not os.path.isabs(cfg_path):
+        cfg_file = conf.find_file(cfg_path)
+    elif os.path.exists(cfg_path):
+        cfg_file = cfg_path
+
+    if not cfg_file:
+        raise cfg.ConfigFilesNotFoundError([conf.api_paste_config])
+
+    configkey = str(uuid.uuid4())
+    APPCONFIGS[configkey] = conf
+
+    LOG.info("Full WSGI config used: %s" % cfg_file)
+    return deploy.loadapp("config:" + cfg_file,
+                          global_conf={'configkey': configkey})
+
+
+def app_factory(global_config, **local_conf):
+    global APPCONFIGS
+    conf = APPCONFIGS.get(global_config.get('configkey'))
+    return setup_app(conf=conf)
+
+
+def build_wsgi_app(argv=None):
+    return load_app(service.prepare_service(argv=argv))
diff --git a/conductor/conductor/api/app.wsgi b/conductor/conductor/api/app.wsgi
new file mode 100644 (file)
index 0000000..573d3d2
--- /dev/null
@@ -0,0 +1,9 @@
+"""Use this file for deploying the API under mod_wsgi.
+See http://pecan.readthedocs.org/en/latest/deployment.html for details.
+"""
+from conductor import service
+from conductor.api import app
+
+# Initialize the oslo configuration library and logging
+conf = service.prepare_service([])
+application = app.load_app(conf)
\ No newline at end of file
diff --git a/conductor/conductor/api/controllers/__init__.py b/conductor/conductor/api/controllers/__init__.py
new file mode 100644 (file)
index 0000000..4f46681
--- /dev/null
@@ -0,0 +1,54 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from os import path
+
+from notario import exceptions
+from notario.utils import forced_leaf_validator
+import pecan
+import six
+
+
+#
+# Error Handler
+#
+def error(url, msg=None, **kwargs):
+    """Error handler"""
+    if msg:
+        pecan.request.context['error_message'] = msg
+    if kwargs:
+        pecan.request.context['kwargs'] = kwargs
+    url = path.join(url, '?error_message=%s' % msg)
+    pecan.redirect(url, internal=True)
+
+
+#
+# Notario Custom Validators
+#
+@forced_leaf_validator
+def string_or_dict(_object, *args):
+    """Validator - Must be Basestring or Dictionary"""
+    error_msg = 'not of type dictionary or string'
+
+    if isinstance(_object, six.string_types):
+        return
+    if isinstance(_object, dict):
+        return
+    raise exceptions.Invalid('dict or basestring type', pair='value',
+                             msg=None, reason=error_msg, *args)
diff --git a/conductor/conductor/api/controllers/errors.py b/conductor/conductor/api/controllers/errors.py
new file mode 100644 (file)
index 0000000..6216721
--- /dev/null
@@ -0,0 +1,149 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import traceback
+
+from oslo_log import log
+import pecan
+from webob.exc import status_map
+
+from conductor.i18n import _
+
+LOG = log.getLogger(__name__)
+
+
+def error_wrapper(func):
+    """Error decorator."""
+    def func_wrapper(self, **kw):
+        """Wrapper."""
+
+        kwargs = func(self, **kw)
+        status = status_map.get(pecan.response.status_code)
+        message = getattr(status, 'explanation', '')
+        explanation = \
+            pecan.request.context.get('error_message', message)
+        error_type = status.__name__
+        title = status.title
+        traceback = getattr(kwargs, 'traceback', None)
+
+        LOG.error(explanation)
+
+        # Modeled after Heat's format
+        error = {
+            "explanation": explanation,
+            "code": pecan.response.status_code,
+            "error": {
+                "message": message,
+                "type": error_type,
+            },
+            "title": title,
+        }
+        if traceback:
+            error['error']['traceback'] = traceback
+        return error
+    return func_wrapper
+
+
+class ErrorsController(object):
+    """Errors Controller /errors/{error_name}"""
+
+    @pecan.expose('json')
+    @error_wrapper
+    def schema(self, **kw):
+        """400"""
+        pecan.request.context['error_message'] = \
+            str(pecan.request.validation_error)
+        pecan.response.status = 400
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose('json')
+    @error_wrapper
+    def invalid(self, **kw):
+        """400"""
+        pecan.response.status = 400
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose()
+    def unauthorized(self, **kw):
+        """401"""
+        # This error is terse and opaque on purpose.
+        # Don't give any clues to help AuthN along.
+        pecan.response.status = 401
+        pecan.response.content_type = 'text/plain'
+        LOG.error('unauthorized')
+        traceback.print_stack()
+        LOG.error(self.__class__)
+        LOG.error(kw)
+        pecan.response.body = _('Authentication required')
+        LOG.error(pecan.response.body)
+        return pecan.response
+
+    @pecan.expose('json')
+    @error_wrapper
+    def forbidden(self, **kw):
+        """403"""
+        pecan.response.status = 403
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose('json')
+    @error_wrapper
+    def not_found(self, **kw):
+        """404"""
+        pecan.response.status = 404
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose('json')
+    @error_wrapper
+    def not_allowed(self, **kw):
+        """405"""
+        kwargs = pecan.request.context.get('kwargs')
+        if kwargs:
+            allow = kwargs.get('allow', None)
+            if allow:
+                pecan.response.headers['Allow'] = allow
+        pecan.response.status = 405
+        return kwargs
+
+    @pecan.expose('json')
+    @error_wrapper
+    def conflict(self, **kw):
+        """409"""
+        pecan.response.status = 409
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose('json')
+    @error_wrapper
+    def server_error(self, **kw):
+        """500"""
+        pecan.response.status = 500
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose('json')
+    @error_wrapper
+    def unimplemented(self, **kw):
+        """501"""
+        pecan.response.status = 501
+        return pecan.request.context.get('kwargs')
+
+    @pecan.expose('json')
+    @error_wrapper
+    def unavailable(self, **kw):
+        """503"""
+        pecan.response.status = 503
+        return pecan.request.context.get('kwargs')
diff --git a/conductor/conductor/api/controllers/root.py b/conductor/conductor/api/controllers/root.py
new file mode 100644 (file)
index 0000000..d7c4a7e
--- /dev/null
@@ -0,0 +1,64 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import pecan
+
+from conductor.api.controllers import errors
+from conductor.api.controllers.v1 import root as v1
+
+MEDIA_TYPE_JSON = 'application/vnd.onap.has-%s+json'
+
+
+class RootController(object):
+    """Root Controller /"""
+
+    def __init__(self):
+        self.errors = errors.ErrorsController()
+        self.v1 = v1.V1Controller()
+
+    @pecan.expose(generic=True, template='json')
+    def index(self):
+        """Catchall for all methods"""
+        base_url = pecan.request.application_url
+        available = [{'tag': 'v1', 'date': '2016-11-01T00:00:00Z', }]
+        collected = [version_descriptor(base_url, v['tag'], v['date'])
+                     for v in available]
+        versions = {'versions': collected}
+        return versions
+
+
+def version_descriptor(base_url, version, released_on):
+    """Version Descriptor"""
+    url = version_url(base_url, version)
+    return {
+        'id': version,
+        'links': [
+            {'href': url, 'rel': 'self', },
+            {'href': 'https://wiki.onap.org/pages/viewpage.action?pageId=16005528',
+             'rel': 'describedby', 'type': 'text/html', }],
+        'media-types': [
+            {'base': 'application/json', 'type': MEDIA_TYPE_JSON % version, }],
+        'status': 'EXPERIMENTAL',
+        'updated': released_on,
+    }
+
+
+def version_url(base_url, version_number):
+    """Version URL"""
+    return '%s/%s' % (base_url, version_number)
diff --git a/conductor/conductor/api/controllers/v1/__init__.py b/conductor/conductor/api/controllers/v1/__init__.py
new file mode 100644 (file)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/api/controllers/v1/plans.py b/conductor/conductor/api/controllers/v1/plans.py
new file mode 100644 (file)
index 0000000..fa635f7
--- /dev/null
@@ -0,0 +1,261 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import six
+import yaml
+from yaml.constructor import ConstructorError
+
+from notario import decorators
+from notario.validators import types
+from oslo_log import log
+import pecan
+from pecan_notario import validate
+
+from conductor.api.controllers import error
+from conductor.api.controllers import string_or_dict
+from conductor.api.controllers import validator
+from conductor.i18n import _, _LI
+
+LOG = log.getLogger(__name__)
+
+CREATE_SCHEMA = (
+    (decorators.optional('files'), types.dictionary),
+    (decorators.optional('id'), types.string),
+    (decorators.optional('limit'), types.integer),
+    (decorators.optional('name'), types.string),
+    ('template', string_or_dict),
+    (decorators.optional('template_url'), types.string),
+    (decorators.optional('timeout'), types.integer),
+)
+
+
+class PlansBaseController(object):
+    """Plans Base Controller - Common Methods"""
+
+    def plan_link(self, plan_id):
+        return [
+            {
+                "href": "%(url)s/v1/%(endpoint)s/%(id)s" %
+                        {
+                            'url': pecan.request.application_url,
+                            'endpoint': 'plans',
+                            'id': plan_id,
+                        },
+                "rel": "self"
+            }
+        ]
+
+    def plans_get(self, plan_id=None):
+        ctx = {}
+        method = 'plans_get'
+        if plan_id:
+            args = {'plan_id': plan_id}
+            LOG.debug('Plan {} requested.'.format(plan_id))
+        else:
+            args = {}
+            LOG.debug('All plans requested.')
+
+        plans_list = []
+
+        client = pecan.request.controller
+        result = client.call(ctx, method, args)
+        plans = result and result.get('plans')
+
+        for the_plan in plans:
+            the_plan_id = the_plan.get('id')
+            the_plan['links'] = [self.plan_link(the_plan_id)]
+            plans_list.append(the_plan)
+
+        if plan_id:
+            if len(plans_list) == 1:
+                return plans_list[0]
+            else:
+                # For a single plan, we return None if not found
+                return None
+        else:
+            # For all plans, it's ok to return an empty list
+            return plans_list
+
+    def plan_create(self, args):
+        ctx = {}
+        method = 'plan_create'
+
+        # TODO(jdandrea): Enhance notario errors to use similar syntax
+        # valid_keys = ['files', 'id', 'limit', 'name',
+        #               'template', 'template_url', 'timeout']
+        # if not set(args.keys()).issubset(valid_keys):
+        #     invalid = [name for name in args if name not in valid_keys]
+        #     invalid_str = ', '.join(invalid)
+        #     error('/errors/invalid',
+        #           _('Invalid keys found: {}').format(invalid_str))
+        # required_keys = ['template']
+        # if not set(required_keys).issubset(args):
+        #     required = [name for name in required_keys if name not in args]
+        #     required_str = ', '.join(required)
+        #     error('/errors/invalid',
+        #           _('Missing required keys: {}').format(required_str))
+
+        LOG.debug('Plan creation requested (name "{}").'.format(
+            args.get('name')))
+
+        client = pecan.request.controller
+        result = client.call(ctx, method, args)
+        plan = result and result.get('plan')
+        if plan:
+            plan_name = plan.get('name')
+            plan_id = plan.get('id')
+            plan['links'] = [self.plan_link(plan_id)]
+            LOG.info(_LI('Plan {} (name "{}") created.').format(
+                plan_id, plan_name))
+        return plan
+
+    def plan_delete(self, plan):
+        ctx = {}
+        method = 'plans_delete'
+
+        plan_name = plan.get('name')
+        plan_id = plan.get('id')
+        LOG.debug('Plan {} (name "{}") deletion requested.'.format(
+            plan_id, plan_name))
+
+        args = {'plan_id': plan_id}
+        client = pecan.request.controller
+        client.call(ctx, method, args)
+        LOG.info(_LI('Plan {} (name "{}") deleted.').format(
+            plan_id, plan_name))
+
+
+class PlansItemController(PlansBaseController):
+    """Plans Item Controller /v1/plans/{plan_id}"""
+
+    def __init__(self, uuid4):
+        """Initializer."""
+        self.uuid = uuid4
+        self.plan = self.plans_get(plan_id=self.uuid)
+
+        if not self.plan:
+            error('/errors/not_found',
+                  _('Plan {} not found').format(self.uuid))
+        pecan.request.context['plan_id'] = self.uuid
+
+    @classmethod
+    def allow(cls):
+        """Allowed methods"""
+        return 'GET,DELETE'
+
+    @pecan.expose(generic=True, template='json')
+    def index(self):
+        """Catchall for unallowed methods"""
+        message = _('The {} method is not allowed.').format(
+            pecan.request.method)
+        kwargs = {'allow': self.allow()}
+        error('/errors/not_allowed', message, **kwargs)
+
+    @index.when(method='OPTIONS', template='json')
+    def index_options(self):
+        """Options"""
+        pecan.response.headers['Allow'] = self.allow()
+        pecan.response.status = 204
+
+    @index.when(method='GET', template='json')
+    def index_get(self):
+        """Get plan"""
+        return {"plans": [self.plan]}
+
+    @index.when(method='DELETE', template='json')
+    def index_delete(self):
+        """Delete a Plan"""
+        self.plan_delete(self.plan)
+        pecan.response.status = 204
+
+
+class PlansController(PlansBaseController):
+    """Plans Controller /v1/plans"""
+
+    @classmethod
+    def allow(cls):
+        """Allowed methods"""
+        return 'GET,POST'
+
+    @pecan.expose(generic=True, template='json')
+    def index(self):
+        """Catchall for unallowed methods"""
+        message = _('The {} method is not allowed.').format(
+            pecan.request.method)
+        kwargs = {'allow': self.allow()}
+        error('/errors/not_allowed', message, **kwargs)
+
+    @index.when(method='OPTIONS', template='json')
+    def index_options(self):
+        """Options"""
+        pecan.response.headers['Allow'] = self.allow()
+        pecan.response.status = 204
+
+    @index.when(method='GET', template='json')
+    def index_get(self):
+        """Get all the plans"""
+        plans = self.plans_get()
+        return {"plans": plans}
+
+    @index.when(method='POST', template='json')
+    @validate(CREATE_SCHEMA, '/errors/schema')
+    def index_post(self):
+        """Create a Plan"""
+
+        # Look for duplicate keys in the YAML/JSON, first in the
+        # entire request, and then again if the template parameter
+        # value is itself an embedded JSON/YAML string.
+        where = "API Request"
+        try:
+            parsed = yaml.load(pecan.request.text, validator.UniqueKeyLoader)
+            if 'template' in parsed:
+                where = "Template"
+                template = parsed['template']
+                if isinstance(template, six.string_types):
+                    yaml.load(template, validator.UniqueKeyLoader)
+        except ConstructorError as exc:
+            # Only bail on the duplicate key problem (problem and problem_mark
+            # attributes are available in ConstructorError):
+            if exc.problem is \
+                    validator.UniqueKeyLoader.DUPLICATE_KEY_PROBLEM_MARK:
+                # ConstructorError messages have a two line snippet.
+                # Grab it, get rid of the second line, and strip any
+                # remaining whitespace so we can fashion a one line msg.
+                snippet = exc.problem_mark.get_snippet()
+                snippet = snippet.split('\n')[0].strip()
+                msg = _('{} has a duplicate key on line {}: {}')
+                error('/errors/invalid',
+                      msg.format(where, exc.problem_mark.line + 1, snippet))
+        except Exception as exc:
+            # Let all others pass through for now.
+            pass
+
+        args = pecan.request.json
+        plan = self.plan_create(args)
+
+        if not plan:
+            error('/errors/server_error', _('Unable to create Plan.'))
+        else:
+            pecan.response.status = 201
+            return plan
+
+    @pecan.expose()
+    def _lookup(self, uuid4, *remainder):
+        """Pecan subcontroller routing callback"""
+        return PlansItemController(uuid4), remainder
diff --git a/conductor/conductor/api/controllers/v1/root.py b/conductor/conductor/api/controllers/v1/root.py
new file mode 100644 (file)
index 0000000..87b4a35
--- /dev/null
@@ -0,0 +1,47 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from oslo_log import log
+import pecan
+from pecan import secure
+
+from conductor.api.controllers import error
+from conductor.api.controllers.v1 import plans
+from conductor.i18n import _
+
+LOG = log.getLogger(__name__)
+
+
+class V1Controller(secure.SecureController):
+    """Version 1 API controller root."""
+
+    plans = plans.PlansController()
+
+    @classmethod
+    def check_permissions(cls):
+        """SecureController permission check callback"""
+        return True
+        # error('/errors/unauthorized', msg)
+
+    @pecan.expose(generic=True, template='json')
+    def index(self):
+        """Catchall for unallowed methods"""
+        message = _('The %s method is not allowed.') % pecan.request.method
+        kwargs = {}
+        error('/errors/not_allowed', message, **kwargs)
diff --git a/conductor/conductor/api/controllers/validator.py b/conductor/conductor/api/controllers/validator.py
new file mode 100644 (file)
index 0000000..f9bff3f
--- /dev/null
@@ -0,0 +1,63 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from yaml.constructor import ConstructorError
+from yaml.nodes import MappingNode
+
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
+
+
class UniqueKeyLoader(Loader):
    """PyYAML Loader that rejects duplicate mapping keys.

    Raises a ConstructorError whenever the same key appears more than
    once at any given level of a mapping.

    https://gist.github.com/pypt/94d747fe5180851196eb#gistcomment-2084028
    """

    DUPLICATE_KEY_PROBLEM_MARK = "found duplicate key"

    def construct_mapping(self, node, deep=False):
        """Build a dict from a mapping node, refusing duplicate keys."""
        if not isinstance(node, MappingNode):
            raise ConstructorError(
                None, None, "expected a mapping node, but found %s" % node.id,
                node.start_mark)
        result = {}
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            # Unhashable objects cannot serve as dict keys.
            try:
                hash(key)
            except TypeError as exc:
                raise ConstructorError("while constructing a mapping",
                                       node.start_mark,
                                       "found unacceptable key (%s)" % exc,
                                       key_node.start_mark)
            if key in result:
                # Seen before at this level: refuse the document.
                raise ConstructorError("while constructing a mapping",
                                       node.start_mark,
                                       self.DUPLICATE_KEY_PROBLEM_MARK,
                                       key_node.start_mark)
            result[key] = self.construct_object(value_node, deep=deep)
        return result
diff --git a/conductor/conductor/api/hooks.py b/conductor/conductor/api/hooks.py
new file mode 100644 (file)
index 0000000..08677cc
--- /dev/null
@@ -0,0 +1,137 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from oslo_log import log
+from pecan import hooks
+
+# from conductor.common.models import plan
+# from conductor.common.music import api
+from conductor.common.music import messaging as music_messaging
+# from conductor.common.music.model import base
+from conductor import messaging
+
+LOG = log.getLogger(__name__)
+
+
class ConfigHook(hooks.PecanHook):
    """Expose the configuration object to controllers.

    The configuration is attached to each request during routing so
    controllers can read it via ``request.cfg``.
    """

    def __init__(self, conf):
        """Remember the configuration object to attach later."""
        super(ConfigHook, self).__init__()
        self.conf = conf

    def on_route(self, state):
        """Attach the configuration to the incoming request."""
        state.request.cfg = self.conf
+
+
class MessagingHook(hooks.PecanHook):
    """Attach a controller RPC client to each incoming request."""

    def __init__(self, conf):
        """Build the RPC client used to reach the controller service."""
        super(MessagingHook, self).__init__()
        transport = messaging.get_transport(conf=conf)
        target = music_messaging.Target(topic="controller")
        self.controller = music_messaging.RPCClient(
            conf=conf, transport=transport, target=target)

    def on_route(self, state):
        """Expose the RPC client via ``request.controller``."""
        state.request.controller = self.controller
+
+
+# NOTE: We no longer use ModelHook, since the API should be asking
+# the controller (via RPC) for info about plans, not requesting them directly.
+
+# class ModelHook(hooks.PecanHook):
+#     """Create and attach dynamic model classes to the request."""
+#
+#     def __init__(self, conf):
+#         super(ModelHook, self).__init__()
+#
+#         # TODO(jdandrea) Move this to DBHook?
+#         music = api.API()
+#         music.keyspace_create(keyspace=conf.keyspace)
+#
+#         # Dynamically create a plan class for the specified keyspace
+#         self.Plan = base.create_dynamic_model(
+#             keyspace=conf.keyspace, baseclass=plan.Plan, classname="Plan")
+#
+#     def before(self, state):
+#         state.request.models = {
+#             "Plan": self.Plan,
+#         }
+
+
+# class DBHook(hooks.PecanHook):
+#
+#     def __init__(self):
+#         self.storage_connection = DBHook.get_connection('metering')
+#         self.event_storage_connection = DBHook.get_connection('event')
+#
+#         if (not self.storage_connection
+#            and not self.event_storage_connection):
+#             raise Exception("API failed to start. Failed to connect to "
+#                             "databases, purpose:  %s" %
+#                             ', '.join(['metering', 'event']))
+#
+#     def before(self, state):
+#         state.request.storage_conn = self.storage_connection
+#         state.request.event_storage_conn = self.event_storage_connection
+#
+#     @staticmethod
+#     def get_connection(purpose):
+#         try:
+#             return storage.get_connection_from_config(cfg.CONF, purpose)
+#         except Exception as err:
+#             params = {"purpose": purpose, "err": err}
+#             LOG.exception(_LE("Failed to connect to db, purpose %(purpose)s "
+#                               "retry later: %(err)s") % params)
+#
+#
+# class NotifierHook(hooks.PecanHook):
+#     """Create and attach a notifier to the request.
+#     Usually, samples will be push to notification bus by notifier when they
+#     are posted via /v2/meters/ API.
+#     """
+#
+#     def __init__(self):
+#         transport = messaging.get_transport()
+#         self.notifier = oslo_messaging.Notifier(
+#             transport, driver=cfg.CONF.publisher_notifier.homing_driver,
+#             publisher_id="conductor.api")
+#
+#     def before(self, state):
+#         state.request.notifier = self.notifier
+#
+#
+# class TranslationHook(hooks.PecanHook):
+#
+#     def after(self, state):
+#         # After a request has been done, we need to see if
+#         # ClientSideError has added an error onto the response.
+#         # If it has we need to get it info the thread-safe WSGI
+#         # environ to be used by the ParsableErrorMiddleware.
+#         if hasattr(state.response, 'translatable_error'):
+#             state.request.environ['translatable_error'] = (
+#                 state.response.translatable_error)
diff --git a/conductor/conductor/api/middleware.py b/conductor/conductor/api/middleware.py
new file mode 100644 (file)
index 0000000..dc0664a
--- /dev/null
@@ -0,0 +1,132 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+"""Middleware to replace the plain text message body of an error
+response with one formatted so the client can parse it.
+
+Based on pecan.middleware.errordocument
+"""
+
+import json
+
+from lxml import etree
+from oslo_log import log
+import six
+import webob
+
+from conductor import i18n
+from conductor.i18n import _
+
+LOG = log.getLogger(__name__)
+
+
class ParsableErrorMiddleware(object):
    """Replace error body with something the client can parse.

    Wraps a WSGI application. Responses with a 2xx/3xx status pass
    through untouched; error responses have their body re-wrapped as
    JSON or XML (chosen from the request's Accept header) so clients
    can parse the fault programmatically.
    """

    @staticmethod
    def best_match_language(accept_language):
        """Determines best available locale from the Accept-Language header.

        :returns: the best language match or None if the 'Accept-Language'
                  header was not available in the request.
        """
        if not accept_language:
            return None
        all_languages = i18n.get_available_languages()
        return accept_language.best_match(all_languages)

    def __init__(self, app):
        # The wrapped WSGI application.
        self.app = app

    def __call__(self, environ, start_response):
        """WSGI entry point; rewrites error responses in place."""
        # Request for this state, modified by replace_start_response()
        # and used when an error is being reported.
        state = {}

        def replacement_start_response(status, headers, exc_info=None):
            """Overrides the default response to make errors parsable."""
            try:
                status_code = int(status.split(' ')[0])
                state['status_code'] = status_code
            except (ValueError, TypeError):  # pragma: nocover
                raise Exception((
                    'ErrorDocumentMiddleware received an invalid '
                    'status %s' % status
                ))
            else:
                if (state['status_code'] // 100) not in (2, 3):
                    # Remove some headers so we can replace them later
                    # when we have the full error message and can
                    # compute the length.
                    headers = [(h, v)
                               for (h, v) in headers
                               if h not in ('Content-Length', 'Content-Type')
                               ]
                # Save the headers in case we need to modify them.
                state['headers'] = headers
                return start_response(status, headers, exc_info)

        # NOTE(review): assumes the wrapped app calls start_response
        # before its body is consumed; otherwise state['status_code']
        # is unset below — confirm against the WSGI stack in use.
        app_iter = self.app(environ, replacement_start_response)
        if (state['status_code'] // 100) not in (2, 3):
            req = webob.Request(environ)
            # Translated fault text, if upstream middleware stored one.
            error = environ.get('translatable_error')
            user_locale = self.best_match_language(req.accept_language)
            if (req.accept.best_match(['application/json', 'application/xml'])
               == 'application/xml'):
                content_type = 'application/xml'
                try:
                    # simple check xml is valid
                    fault = etree.fromstring(b'\n'.join(app_iter))
                    # Add the translated error to the xml data
                    if error is not None:
                        for fault_string in fault.findall('faultstring'):
                            fault_string.text = i18n.translate(error,
                                                               user_locale)
                    error_message = etree.tostring(fault)
                    body = b''.join((b'<error_message>',
                                     error_message,
                                     b'</error_message>'))
                except etree.XMLSyntaxError as err:
                    LOG.error(_('Error parsing HTTP response: %s'), err)
                    # Body was not valid XML; fall back to wrapping the
                    # bare status code instead.
                    error_message = state['status_code']
                    body = '<error_message>%s</error_message>' % error_message
                    if six.PY3:
                        body = body.encode('utf-8')
            else:
                content_type = 'application/json'
                app_data = b'\n'.join(app_iter)
                if six.PY3:
                    app_data = app_data.decode('utf-8')
                try:
                    fault = json.loads(app_data)
                    if error is not None and 'faultstring' in fault:
                        fault['faultstring'] = i18n.translate(error,
                                                              user_locale)
                except ValueError as err:
                    # Body was not valid JSON; wrap the raw text instead.
                    fault = app_data
                body = json.dumps({'error_message': fault})
                if six.PY3:
                    body = body.encode('utf-8')

            # Re-add the headers removed in replacement_start_response()
            # now that the final body and its length are known.
            state['headers'].append(('Content-Length', str(len(body))))
            state['headers'].append(('Content-Type', content_type))
            body = [body]
        else:
            body = app_iter
        return body
diff --git a/conductor/conductor/api/rbac.py b/conductor/conductor/api/rbac.py
new file mode 100644 (file)
index 0000000..6caaad3
--- /dev/null
@@ -0,0 +1,106 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+"""Access Control Lists (ACL's) control access the API server."""
+
+from oslo_config import cfg
+from oslo_policy import policy
+import pecan
+
# Lazily-created oslo.policy enforcer shared by the functions below;
# reset() discards it so it is rebuilt on next use.
_ENFORCER = None

CONF = cfg.CONF
+
+
def reset():
    """Clear and discard the cached policy enforcer, if any."""
    global _ENFORCER
    if _ENFORCER is None:
        return
    _ENFORCER.clear()
    _ENFORCER = None
+
+
def _has_rule(name):
    """Return True when the loaded policy rules define *name*."""
    return name in _ENFORCER.rules
+
+
def enforce(policy_name, request):
    """Check the request against the "homing:<policy_name>" policy rule.

    Aborts the request with HTTP 403 when a matching rule (or a
    'default' rule) exists and denies access.

    :param policy_name: the policy name to validate AuthZ against.
    :param request: HTTP request
    """
    global _ENFORCER
    if not _ENFORCER:
        _ENFORCER = policy.Enforcer(CONF)
        _ENFORCER.load_rules()

    rule_method = "homing:" + policy_name
    headers = request.headers

    policy_dict = {
        'roles': headers.get('X-Roles', "").split(","),
        'user_id': headers.get('X-User-Id'),
        'project_id': headers.get('X-Project-Id'),
    }

    # maintain backward compat with Juno and previous by allowing the action
    # if there is no rule defined for it
    if ((_has_rule('default') or _has_rule(rule_method)) and
            not _ENFORCER.enforce(rule_method, {}, policy_dict)):
        pecan.core.abort(status_code=403, detail='RBAC Authorization Failed')
+
+
+# TODO(fabiog): these methods are still used because the scoping part is really
+# convoluted and difficult to separate out.
+
def get_limited_to(headers):
    """Return the user and project the request should be limited to.

    :param headers: HTTP headers dictionary
    :return: A tuple of (user, project), set to None if there's no limit on
    one of these.
    """
    global _ENFORCER
    if not _ENFORCER:
        _ENFORCER = policy.Enforcer(CONF)
        _ENFORCER.load_rules()

    policy_dict = {
        'roles': headers.get('X-Roles', "").split(","),
        'user_id': headers.get('X-User-Id'),
        'project_id': headers.get('X-Project-Id'),
    }

    # Use the 'segregation' rule (added in Kilo) when defined; otherwise
    # fall back to 'context_is_admin' for Juno-and-prior compatibility.
    if _has_rule('segregation'):
        rule_name = 'segregation'
    else:
        rule_name = 'context_is_admin'

    # Non-admin callers are scoped to their own user and project.
    if not _ENFORCER.enforce(rule_name, {}, policy_dict):
        return headers.get('X-User-Id'), headers.get('X-Project-Id')

    return None, None
+
+
def get_limited_to_project(headers):
    """Return the project the request should be limited to.

    :param headers: HTTP headers dictionary
    :return: A project, or None if there's no limit on it.
    """
    _user, project = get_limited_to(headers)
    return project
diff --git a/conductor/conductor/controller/__init__.py b/conductor/conductor/controller/__init__.py
new file mode 100644 (file)
index 0000000..013ad0a
--- /dev/null
@@ -0,0 +1,20 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from .service import ControllerServiceLauncher  # noqa: F401
diff --git a/conductor/conductor/controller/rpc.py b/conductor/conductor/controller/rpc.py
new file mode 100644 (file)
index 0000000..fb385ac
--- /dev/null
@@ -0,0 +1,99 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import uuid
+
+
class ControllerRPCEndpoint(object):
    """Controller RPC endpoint.

    Exposes plan operations to the RPC layer. Every method returns a
    dict with a 'response' payload and a boolean 'error' flag.
    """

    def __init__(self, conf, plan_class):
        """Set up the endpoint.

        :param conf: configuration object; reads conf.controller.timeout
            and conf.controller.limit for plan defaults.
        :param plan_class: dynamically created Plan model class.
        """
        self.conf = conf
        self.Plan = plan_class

    def _find_plans(self, arg):
        """Return plans matching arg['plan_id'], or all plans if absent.

        Shared lookup used by plans_delete() and plans_get() so the
        filtering behavior stays consistent between them.
        """
        plan_id = arg.get('plan_id')
        if plan_id:
            return self.Plan.query.filter_by(id=plan_id)
        return self.Plan.query.all()

    def plan_create(self, ctx, arg):
        """Create a plan.

        :param ctx: RPC context (unused).
        :param arg: dict with optional 'name', 'timeout', 'limit',
            and 'template' keys.
        """
        name = arg.get('name', str(uuid.uuid4()))
        timeout = arg.get('timeout', self.conf.controller.timeout)
        recommend_max = arg.get('limit', self.conf.controller.limit)
        template = arg.get('template', None)
        status = self.Plan.TEMPLATE
        new_plan = self.Plan(name, timeout, recommend_max, template,
                             status=status)

        if new_plan:
            plan_json = {
                "plan": {
                    "name": new_plan.name,
                    "id": new_plan.id,
                    "status": status,
                }
            }
            rtn = {
                'response': plan_json,
                'error': False}
        else:
            # TODO(jdandrea): Catch and show the error here
            rtn = {
                'response': {},
                'error': True}
        return rtn

    def plans_delete(self, ctx, arg):
        """Delete one or more plans (all plans if no 'plan_id' given)."""
        for the_plan in self._find_plans(arg):
            the_plan.delete()

        return {
            'response': {},
            'error': False}

    def plans_get(self, ctx, arg):
        """Get one or more plans (all plans if no 'plan_id' given)."""
        plan_list = []
        for the_plan in self._find_plans(arg):
            plan_json = {
                "name": the_plan.name,
                "id": the_plan.id,
                "status": the_plan.status,
            }
            # Message and recommendations are included only when present.
            if the_plan.message:
                plan_json["message"] = the_plan.message
            if the_plan.solution:
                recs = the_plan.solution.get('recommendations')
                if recs:
                    plan_json["recommendations"] = recs
            plan_list.append(plan_json)

        return {
            'response': {"plans": plan_list},
            'error': False}
diff --git a/conductor/conductor/controller/service.py b/conductor/conductor/controller/service.py
new file mode 100644 (file)
index 0000000..d13518c
--- /dev/null
@@ -0,0 +1,104 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import cotyledon
+from oslo_config import cfg
+from oslo_log import log
+
+from conductor.common.models import plan
+from conductor.common.music import api
+from conductor.common.music import messaging as music_messaging
+from conductor.common.music.model import base
+from conductor.controller import rpc
+from conductor.controller import translator_svc
+from conductor import messaging
+from conductor import service
+
LOG = log.getLogger(__name__)

CONF = cfg.CONF

# Controller service options, registered under the [controller] group.
CONTROLLER_OPTS = [
    cfg.IntOpt('timeout',
               default=10,
               min=1,
               help='Timeout for planning requests. '
                    'Default value is 10.'),
    cfg.IntOpt('limit',
               default=1,
               min=1,
               help='Maximum number of result sets to return. '
                    'Default value is 1.'),
    cfg.IntOpt('workers',
               default=1,
               min=1,
               help='Number of workers for controller service. '
                    'Default value is 1.'),
    cfg.BoolOpt('concurrent',
                default=False,
                help='Set to True when controller will run in active-active '
                     'mode. When set to False, controller will flush any '
                     'abandoned messages at startup. The controller always '
                     'restarts abandoned template translations at startup.'),
]

CONF.register_opts(CONTROLLER_OPTS, group='controller')

# Pull in service opts. We use them here.
OPTS = service.OPTS
CONF.register_opts(OPTS)
+
+
class ControllerServiceLauncher(object):
    """Launcher for the controller service.

    Sets up Music storage and the dynamic Plan model, then runs the
    RPC and translator services under a cotyledon service manager.
    """

    def __init__(self, conf):
        """Prepare storage and the Plan model.

        :param conf: configuration object; conf.keyspace names the
            Music keyspace to create/use.
        :raises RuntimeError: if the dynamic Plan model can't be built.
        """
        self.conf = conf

        # Set up Music access.
        self.music = api.API()
        self.music.keyspace_create(keyspace=conf.keyspace)

        # Dynamically create a plan class for the specified keyspace
        self.Plan = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=plan.Plan, classname="Plan")

        if not self.Plan:
            # A bare `raise` here (as originally written) would itself
            # fail with "No active exception to re-raise"; raise an
            # explicit, descriptive error instead.
            raise RuntimeError(
                "Unable to create Plan model for keyspace %s"
                % conf.keyspace)

    def run(self):
        """Start the RPC and translator services and block until exit."""
        transport = messaging.get_transport(self.conf)
        if transport:
            topic = "controller"
            target = music_messaging.Target(topic=topic)
            endpoints = [rpc.ControllerRPCEndpoint(self.conf, self.Plan), ]
            # In active-active (concurrent) mode, don't flush abandoned
            # messages at startup; another controller may own them.
            flush = not self.conf.controller.concurrent
            kwargs = {'transport': transport,
                      'target': target,
                      'endpoints': endpoints,
                      'flush': flush, }
            svcmgr = cotyledon.ServiceManager()
            svcmgr.add(music_messaging.RPCService,
                       workers=self.conf.controller.workers,
                       args=(self.conf,), kwargs=kwargs)

            kwargs = {'plan_class': self.Plan, }
            svcmgr.add(translator_svc.TranslatorService,
                       workers=self.conf.controller.workers,
                       args=(self.conf,), kwargs=kwargs)
            svcmgr.run()
diff --git a/conductor/conductor/controller/translator.py b/conductor/conductor/controller/translator.py
new file mode 100644 (file)
index 0000000..eb467fe
--- /dev/null
@@ -0,0 +1,822 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import copy
+import datetime
+import json
+import os
+import uuid
+import yaml
+
+from oslo_config import cfg
+from oslo_log import log
+import six
+
+from conductor import __file__ as conductor_root
+from conductor.common.music import messaging as music_messaging
+from conductor.common import threshold
+from conductor import messaging
+from conductor import service
+
LOG = log.getLogger(__name__)

CONF = cfg.CONF

# Homing template versions accepted during validation.
VERSIONS = ["2016-11-01", "2017-10-10"]
# Allowed keys for entries in a template's "locations" section.
LOCATION_KEYS = ['latitude', 'longitude', 'host_name', 'clli_code']
# Known inventory providers; A&AI is the only (and default) one.
INVENTORY_PROVIDERS = ['aai']
INVENTORY_TYPES = ['cloud', 'service']
DEFAULT_INVENTORY_PROVIDER = INVENTORY_PROVIDERS[0]
# Allowed keys for explicitly-listed candidates within a demand.
CANDIDATE_KEYS = ['inventory_type', 'candidate_id', 'location_id',
                  'location_type', 'cost']
# Allowed keys for a demand definition.
DEMAND_KEYS = ['inventory_provider', 'inventory_type', 'service_type',
               'service_id', 'service_resource_id', 'customer_id',
               'default_cost', 'candidates', 'region', 'complex',
               'required_candidates', 'excluded_candidates',
               'subdivision', 'flavor']
# Allowed top-level keys for a constraint definition.
CONSTRAINT_KEYS = ['type', 'demands', 'properties']
# Schema for each supported constraint type (see legend below).
CONSTRAINTS = {
    # constraint_type: {
    #   split: split into individual constraints, one per demand
    #   required: list of required property names,
    #   optional: list of optional property names,
    #   thresholds: dict of property/base-unit pairs for threshold parsing
    #   allowed: dict of keys and allowed values (if controlled vocab);
    #            only use this for Conductor-controlled values!
    # }
    'attribute': {
        'split': True,
        'required': ['evaluate'],
    },
    'distance_between_demands': {
        'required': ['distance'],
        'thresholds': {
            'distance': 'distance'
        },
    },
    'distance_to_location': {
        'split': True,
        'required': ['distance', 'location'],
        'thresholds': {
            'distance': 'distance'
        },
    },
    'instance_fit': {
        'split': True,
        'required': ['controller'],
        'optional': ['request'],
    },
    'inventory_group': {},
    'region_fit': {
        'split': True,
        'required': ['controller'],
        'optional': ['request'],
    },
    'zone': {
        'required': ['qualifier', 'category'],
        'allowed': {'qualifier': ['same', 'different'],
                    'category': ['disaster', 'region', 'complex',
                                 'time', 'maintenance']},
    },
}
+
+
class TranslatorException(Exception):
    """Raised when a homing template fails translation or validation."""
+
+
+class Translator(object):
+    """Template translator.
+
+    Takes an input template and translates it into
+    something the solver can use. Calls the data service
+    as needed, giving it the inventory provider as context.
+    Presently the only inventory provider is A&AI. Others
+    may be added in the future.
+    """
+
+    def __init__(self, conf, plan_name, plan_id, template):
+        self.conf = conf
+        self._template = copy.deepcopy(template)
+        self._plan_name = plan_name
+        self._plan_id = plan_id
+        self._translation = None
+        self._valid = False
+        self._ok = False
+
+        # Set up the RPC service(s) we want to talk to.
+        self.data_service = self.setup_rpc(self.conf, "data")
+
+    def setup_rpc(self, conf, topic):
+        """Set up the RPC Client"""
+        # TODO(jdandrea): Put this pattern inside music_messaging?
+        transport = messaging.get_transport(conf=conf)
+        target = music_messaging.Target(topic=topic)
+        client = music_messaging.RPCClient(conf=conf,
+                                           transport=transport,
+                                           target=target)
+        return client
+
+    def create_components(self):
+        # TODO(jdandrea): Make deep copies so the template is untouched
+        self._version = self._template.get("homing_template_version")
+        self._parameters = self._template.get("parameters", {})
+        self._locations = self._template.get("locations", {})
+        self._demands = self._template.get("demands", {})
+        self._constraints = self._template.get("constraints", {})
+        self._optmization = self._template.get("optimization", {})
+        self._reservations = self._template.get("reservation", {})
+
+        if type(self._version) is datetime.date:
+            self._version = str(self._version)
+
    def validate_components(self):
        """Cursory validation of template components.

        More detailed validation happens while parsing each component.
        Sets self._valid to True on success; raises TranslatorException
        on the first problem found.
        """
        self._valid = False

        # Check version
        # NOTE(review): the message says "conductor_template_version" but
        # the template key read by create_components() is
        # "homing_template_version" -- confirm which name is canonical.
        if self._version not in VERSIONS:
            raise TranslatorException(
                "conductor_template_version must be one "
                "of: {}".format(', '.join(VERSIONS)))

        # Check top level structure.
        # Each entry: display name, optional per-item key whitelist,
        # and the section content gathered by create_components().
        components = {
            "parameters": {
                "name": "Parameter",
                "content": self._parameters,
            },
            "locations": {
                "name": "Location",
                "keys": LOCATION_KEYS,
                "content": self._locations,
            },
            "demands": {
                "name": "Demand",
                "content": self._demands,
            },
            "constraints": {
                "name": "Constraint",
                "keys": CONSTRAINT_KEYS,
                "content": self._constraints,
            },
            "optimization": {
                "name": "Optimization",
                "content": self._optmization,
            },
            "reservations": {
                "name": "Reservation",
                "content": self._reservations,
            }
        }
        for name, component in components.items():
            # The dict key is immediately shadowed by the display name.
            name = component.get('name')
            keys = component.get('keys', None)
            content = component.get('content')

            # Every section must be a dictionary ...
            if type(content) is not dict:
                raise TranslatorException(
                    "{} section must be a dictionary".format(name))
            # ... and, when the section declares a key whitelist,
            # each item may only use whitelisted keys.
            for content_name, content_def in content.items():
                if not keys:
                    continue

                for key in content_def:
                    if key not in keys:
                        raise TranslatorException(
                            "{} {} has an invalid key {}".format(
                                name, content_name, key))

        # Cross-checks: every constraint must reference known demands
        # (and, when given, a known location).
        demand_keys = self._demands.keys()
        location_keys = self._locations.keys()
        for constraint_name, constraint in self._constraints.items():

            # Require a single demand (string), or a list of one or more.
            demands = constraint.get('demands')
            if isinstance(demands, six.string_types):
                demands = [demands]
            if not isinstance(demands, list) or len(demands) < 1:
                raise TranslatorException(
                    "Demand list for Constraint {} must be "
                    "a list of names or a string with one name".format(
                        constraint_name))
            if not set(demands).issubset(demand_keys):
                raise TranslatorException(
                    "Undefined Demand(s) {} in Constraint '{}'".format(
                        list(set(demands).difference(demand_keys)),
                        constraint_name))

            # An optional 'location' property must name a known location.
            properties = constraint.get('properties', None)
            if properties:
                location = properties.get('location', None)
                if location:
                    if location not in location_keys:
                        raise TranslatorException(
                            "Location {} in Constraint {} is undefined".format(
                                location, constraint_name))

        self._valid = True
+
+    def _parse_parameters(self, obj, path=[]):
+        """Recursively parse all {get_param: X} occurrences
+
+        This modifies obj in-place. If you want to keep the original,
+        pass in a deep copy.
+        """
+        # Ok to start with a string ...
+        if isinstance(path, six.string_types):
+            # ... but the breadcrumb trail goes in an array.
+            path = [path]
+
+        # Traverse a list
+        if type(obj) is list:
+            for idx, val in enumerate(obj, start=0):
+                # Add path to the breadcrumb trail
+                new_path = list(path)
+                new_path[-1] += "[{}]".format(idx)
+
+                # Look at each element.
+                obj[idx] = self._parse_parameters(val, new_path)
+
+        # Traverse a dict
+        elif type(obj) is dict:
+            # Did we find a "{get_param: ...}" intrinsic?
+            if obj.keys() == ['get_param']:
+                param_name = obj['get_param']
+
+                # The parameter name must be a string.
+                if not isinstance(param_name, six.string_types):
+                    path_str = ' > '.join(path)
+                    raise TranslatorException(
+                        "Parameter name '{}' not a string in path {}".format(
+                            param_name, path_str))
+
+                # Parameter name must be defined.
+                if param_name not in self._parameters:
+                    path_str = ' > '.join(path)
+                    raise TranslatorException(
+                        "Parameter '{}' undefined in path {}".format(
+                            param_name, path_str))
+
+                # Return the value in place of the call.
+                return self._parameters.get(param_name)
+
+            # Not an intrinsic. Traverse as usual.
+            for key in obj.keys():
+                # Add path to the breadcrumb trail.
+                new_path = list(path)
+                new_path.append(key)
+
+                # Look at each key/value pair.
+                obj[key] = self._parse_parameters(obj[key], new_path)
+
+        # Return whatever we have after unwinding.
+        return obj
+
+    def parse_parameters(self):
+        """Resolve all parameters references."""
+        locations = copy.deepcopy(self._locations)
+        self._locations = self._parse_parameters(locations, 'locations')
+
+        demands = copy.deepcopy(self._demands)
+        self._demands = self._parse_parameters(demands, 'demands')
+
+        constraints = copy.deepcopy(self._constraints)
+        self._constraints = self._parse_parameters(constraints, 'constraints')
+
+        reservations = copy.deepcopy(self._reservations)
+        self._reservations = self._parse_parameters(reservations,
+                                                    'reservations')
+
+    def parse_locations(self, locations):
+        """Prepare the locations for use by the solver."""
+        parsed = {}
+        for location, args in locations.items():
+            ctxt = {}
+            response = self.data_service.call(
+                ctxt=ctxt,
+                method="resolve_location",
+                args=args)
+
+            resolved_location = \
+                response and response.get('resolved_location')
+            if not resolved_location:
+                raise TranslatorException(
+                    "Unable to resolve location {}".format(location)
+                )
+            parsed[location] = resolved_location
+        return parsed
+
+    def parse_demands(self, demands):
+        """Validate/prepare demands for use by the solver."""
+        if type(demands) is not dict:
+            raise TranslatorException("Demands must be provided in "
+                                      "dictionary form")
+
+        # Look at each demand
+        demands_copy = copy.deepcopy(demands)
+        parsed = {}
+        for name, requirements in demands_copy.items():
+            inventory_candidates = []
+            for requirement in requirements:
+                for key in requirement:
+                    if key not in DEMAND_KEYS:
+                        raise TranslatorException(
+                            "Demand {} has an invalid key {}".format(
+                                requirement, key))
+
+                if 'candidates' in requirement:
+                    # Candidates *must* specify an inventory provider
+                    provider = requirement.get("inventory_provider")
+                    if provider and provider not in INVENTORY_PROVIDERS:
+                        raise TranslatorException(
+                            "Unsupported inventory provider {} "
+                            "in demand {}".format(provider, name))
+                    else:
+                        provider = DEFAULT_INVENTORY_PROVIDER
+
+                    # Check each candidate
+                    for candidate in requirement.get('candidates'):
+                        # Must be a dictionary
+                        if type(candidate) is not dict:
+                            raise TranslatorException(
+                                "Candidate found in demand {} that is "
+                                "not a dictionary".format(name))
+
+                        # Must have only supported keys
+                        for key in candidate.keys():
+                            if key not in CANDIDATE_KEYS:
+                                raise TranslatorException(
+                                    "Candidate with invalid key {} found "
+                                    "in demand {}".format(key, name)
+                                )
+
+                        # TODO(jdandrea): Check required/optional keys
+
+                        # Set the inventory provider if not already
+                        candidate['inventory_provider'] = \
+                            candidate.get('inventory_provider', provider)
+
+                        # Set cost if not already (default cost is 0?)
+                        candidate['cost'] = candidate.get('cost', 0)
+
+                        # Add to our list of parsed candidates
+                        inventory_candidates.append(candidate)
+
+                # candidates are specified through inventory providers
+                # Do the basic sanity checks for inputs
+                else:
+                    # inventory provider MUST be specified
+                    provider = requirement.get("inventory_provider")
+                    if not provider:
+                        raise TranslatorException(
+                            "Inventory provider not specified "
+                            "in demand {}".format(name)
+                        )
+                    elif provider and provider not in INVENTORY_PROVIDERS:
+                        raise TranslatorException(
+                            "Unsupported inventory provider {} "
+                            "in demand {}".format(provider, name)
+                        )
+                    else:
+                        provider = DEFAULT_INVENTORY_PROVIDER
+                        requirement['provider'] = provider
+
+                    # inventory type MUST be specified
+                    inventory_type = requirement.get('inventory_type')
+                    if not inventory_type or inventory_type == '':
+                        raise TranslatorException(
+                            "Inventory type not specified for "
+                            "demand {}".format(name)
+                        )
+                    if inventory_type and \
+                            inventory_type not in INVENTORY_TYPES:
+                        raise TranslatorException(
+                            "Unknown inventory type {} specified for "
+                            "demand {}".format(inventory_type, name)
+                        )
+
+                    # For service inventories, customer_id and
+                    # service_type MUST be specified
+                    if inventory_type == 'service':
+                        customer_id = requirement.get('customer_id')
+                        if not customer_id:
+                            raise TranslatorException(
+                                "Customer ID not specified for "
+                                "demand {}".format(name)
+                            )
+                        service_type = requirement.get('service_type')
+                        if not service_type:
+                            raise TranslatorException(
+                                "Service Type not specified for "
+                                "demand {}".format(name)
+                            )
+
+                # TODO(jdandrea): Check required/optional keys for requirement
+                # elif 'inventory_type' in requirement:
+                #     # For now this is just a stand-in candidate
+                #     candidate = {
+                #         'inventory_provider':
+                #             requirement.get('inventory_provider',
+                #                             DEFAULT_INVENTORY_PROVIDER),
+                #         'inventory_type':
+                #             requirement.get('inventory_type', ''),
+                #         'candidate_id': '',
+                #         'location_id': '',
+                #         'location_type': '',
+                #         'cost': 0,
+                #     }
+                #
+                #     # Add to our list of parsed candidates
+                #     inventory_candidates.append(candidate)
+
+            # Ask conductor-data for one or more candidates.
+            ctxt = {
+                "plan_id": self._plan_id,
+                "plan_name": self._plan_name,
+            }
+            args = {
+                "demands": {
+                    name: requirements,
+                }
+            }
+
+            # Check if required_candidate and excluded candidate
+            # are mutually exclusive.
+            for requirement in requirements:
+                required_candidates = requirement.get("required_candidates")
+                excluded_candidates = requirement.get("excluded_candidates")
+                if (required_candidates and
+                    excluded_candidates and
+                    set(map(lambda entry: entry['candidate_id'],
+                        required_candidates))
+                    & set(map(lambda entry: entry['candidate_id'],
+                          excluded_candidates))):
+                    raise TranslatorException(
+                        "Required candidate list and excluded candidate"
+                        " list are not mutually exclusive for demand"
+                        " {}".format(name)
+                    )
+
+            response = self.data_service.call(
+                ctxt=ctxt,
+                method="resolve_demands",
+                args=args)
+
+            resolved_demands = \
+                response and response.get('resolved_demands')
+
+            required_candidates = resolved_demands\
+                .get('required_candidates')
+            if not resolved_demands:
+                raise TranslatorException(
+                    "Unable to resolve inventory "
+                    "candidates for demand {}"
+                    .format(name)
+                )
+            resolved_candidates = resolved_demands.get(name)
+            for candidate in resolved_candidates:
+                inventory_candidates.append(candidate)
+            if len(inventory_candidates) < 1:
+                if not required_candidates:
+                    raise TranslatorException(
+                        "Unable to find any candidate for "
+                        "demand {}".format(name)
+                    )
+                else:
+                    raise TranslatorException(
+                        "Unable to find any required "
+                        "candidate for demand {}"
+                        .format(name)
+                    )
+            parsed[name] = {
+                "candidates": inventory_candidates,
+            }
+
+        return parsed
+
    def parse_constraints(self, constraints):
        """Validate/prepare constraints for use by the solver.

        Each constraint is checked against the CONSTRAINTS registry:
        supported types, required/optional properties, controlled
        vocabularies, and threshold-formatted values. Constraints whose
        definition sets 'split' are fanned out into one entry per demand.
        """
        if type(constraints) is not dict:
            raise TranslatorException("Constraints must be provided in "
                                      "dictionary form")

        # Look at each constraint. Properties must exist, even if empty.
        constraints_copy = copy.deepcopy(constraints)

        parsed = {}
        for name, constraint in constraints_copy.items():

            if not constraint.get('properties'):
                constraint['properties'] = {}

            constraint_type = constraint.get('type')
            # May be None here; the unsupported-type check below raises
            # before constraint_def is ever dereferenced.
            constraint_def = CONSTRAINTS.get(constraint_type)

            # Is it a supported type?
            if constraint_type not in CONSTRAINTS:
                raise TranslatorException(
                    "Unsupported type '{}' found in constraint "
                    "named '{}'".format(constraint_type, name))

            # Now walk through the constraint's content
            for key, value in constraint.items():
                # Must be a supported key
                if key not in CONSTRAINT_KEYS:
                    raise TranslatorException(
                        "Invalid key '{}' found in constraint "
                        "named '{}'".format(key, name))

                # For properties ...
                if key == 'properties':
                    # Make sure all required properties are present
                    required = constraint_def.get('required', [])
                    for req_prop in required:
                        if req_prop not in value.keys():
                            raise TranslatorException(
                                "Required property '{}' not found in "
                                "constraint named '{}'".format(
                                    req_prop, name))
                        # Required properties must also be non-empty.
                        if not value.get(req_prop) \
                                or value.get(req_prop) == '':
                            raise TranslatorException(
                                "No value specified for property '{}' in "
                                "constraint named '{}'".format(
                                    req_prop, name))

                    # Make sure there are no unknown properties
                    optional = constraint_def.get('optional', [])
                    for prop_name in value.keys():
                        if prop_name not in required + optional:
                            raise TranslatorException(
                                "Unknown property '{}' in "
                                "constraint named '{}'".format(
                                    prop_name, name))

                    # If a property has a controlled vocabulary, make
                    # sure its value is one of the allowed ones.
                    allowed = constraint_def.get('allowed', {})
                    for prop_name, allowed_values in allowed.items():
                        if prop_name in value.keys():
                            prop_value = value.get(prop_name, '')
                            if prop_value not in allowed_values:
                                raise TranslatorException(
                                    "Property '{}' value '{}' unsupported in "
                                    "constraint named '{}' (must be one of "
                                    "{})".format(prop_name, prop_value,
                                                 name, allowed_values))

                    # Break all threshold-formatted values into parts
                    # (e.g. "< 500 ms" -> operator/value/units).
                    thresholds = constraint_def.get('thresholds', {})
                    for thr_prop, base_units in thresholds.items():
                        if thr_prop in value.keys():
                            expression = value.get(thr_prop)
                            thr = threshold.Threshold(expression, base_units)
                            value[thr_prop] = thr.parts

            # We already know we have one or more demands due to
            # validate_components(). We still need to coerce the demands
            # into a list in case only one demand was provided.
            constraint_demands = constraint.get('demands')
            if isinstance(constraint_demands, six.string_types):
                constraint['demands'] = [constraint_demands]

            # Either split the constraint into parts, one per demand,
            # or use it as-is. Note: split entries carry a single demand
            # name (a string) in 'demands', not a list.
            if constraint_def.get('split'):
                for demand in constraint.get('demands', []):
                    constraint_demand = name + '_' + demand
                    parsed[constraint_demand] = copy.deepcopy(constraint)
                    parsed[constraint_demand]['name'] = name
                    parsed[constraint_demand]['demands'] = demand
            else:
                parsed[name] = copy.deepcopy(constraint)
                parsed[name]['name'] = name

        return parsed
+
+    def parse_optimization(self, optimization):
+        """Validate/prepare optimization for use by the solver."""
+
+        # WARNING: The template format for optimization is generalized,
+        # however the solver is very particular about the expected
+        # goal, functions, and operands. Therefore, for the time being,
+        # we are choosing to be highly conservative in what we accept
+        # at the template level. Once the solver can handle the more
+        # general form, we can make the translation pass using standard
+        # compiler techniques and tools like antlr (antlr4-python2-runtime).
+
+        if not optimization:
+            LOG.debug("No objective function or "
+                      "optimzation provided in the template")
+            return
+
+        optimization_copy = copy.deepcopy(optimization)
+        parsed = {
+            "goal": "min",
+            "operation": "sum",
+            "operands": [],
+        }
+
+        if type(optimization_copy) is not dict:
+            raise TranslatorException("Optimization must be a dictionary.")
+
+        goals = optimization_copy.keys()
+        if goals != ['minimize']:
+            raise TranslatorException(
+                "Optimization must contain a single goal of 'minimize'.")
+
+        funcs = optimization_copy['minimize'].keys()
+        if funcs != ['sum']:
+            raise TranslatorException(
+                "Optimization goal 'minimize' must "
+                "contain a single function of 'sum'.")
+
+        operands = optimization_copy['minimize']['sum']
+        if type(operands) is not list:
+            # or len(operands) != 2:
+            raise TranslatorException(
+                "Optimization goal 'minimize', function 'sum' "
+                "must be a list of exactly two operands.")
+
+        def get_distance_between_args(operand):
+            args = operand.get('distance_between')
+            if type(args) is not list and len(args) != 2:
+                raise TranslatorException(
+                    "Optimization 'distance_between' arguments must "
+                    "be a list of length two.")
+
+            got_demand = False
+            got_location = False
+            for arg in args:
+                if not got_demand and arg in self._demands.keys():
+                    got_demand = True
+                if not got_location and arg in self._locations.keys():
+                    got_location = True
+            if not got_demand or not got_location:
+                raise TranslatorException(
+                    "Optimization 'distance_between' arguments {} must "
+                    "include one valid demand name and one valid "
+                    "location name.".format(args))
+
+            return args
+
+        for operand in operands:
+            weight = 1.0
+            args = None
+
+            if operand.keys() == ['distance_between']:
+                # Value must be a list of length 2 with one
+                # location and one demand
+                args = get_distance_between_args(operand)
+
+            elif operand.keys() == ['product']:
+                for product_op in operand['product']:
+                    if threshold.is_number(product_op):
+                        weight = product_op
+                    elif type(product_op) is dict:
+                        if product_op.keys() == ['distance_between']:
+                            function = 'distance_between'
+                            args = get_distance_between_args(product_op)
+                        elif product_op.keys() == ['cloud_version']:
+                            function = 'cloud_version'
+                            args = product_op.get('cloud_version')
+
+                if not args:
+                    raise TranslatorException(
+                        "Optimization products must include at least "
+                        "one 'distance_between' function call and "
+                        "one optional number to be used as a weight.")
+
+            # We now have our weight/function_param.
+            parsed['operands'].append(
+                {
+                    "operation": "product",
+                    "weight": weight,
+                    "function": function,
+                    "function_param": args,
+                }
+            )
+        return parsed
+
+    def parse_reservations(self, reservations):
+        demands = self._demands
+        if type(reservations) is not dict:
+            raise TranslatorException("Reservations must be provided in "
+                                      "dictionary form")
+
+        parsed = {}
+        if reservations:
+            parsed['counter'] = 0
+        for name, reservation in reservations.items():
+            if not reservation.get('properties'):
+                reservation['properties'] = {}
+            for demand in reservation.get('demands', []):
+                if demand in demands.keys():
+                    constraint_demand = name + '_' + demand
+                    parsed['demands'] = {}
+                    parsed['demands'][constraint_demand] = \
+                        copy.deepcopy(reservation)
+                    parsed['demands'][constraint_demand]['name'] = name
+                    parsed['demands'][constraint_demand]['demand'] = demand
+
+        return parsed
+
+    def do_translation(self):
+        """Perform the translation."""
+        if not self.valid:
+            raise TranslatorException("Can't translate an invalid template.")
+        self._translation = {
+            "conductor_solver": {
+                "version": self._version,
+                "plan_id": self._plan_id,
+                "locations": self.parse_locations(self._locations),
+                "demands": self.parse_demands(self._demands),
+                "constraints": self.parse_constraints(self._constraints),
+                "objective": self.parse_optimization(self._optmization),
+                "reservations": self.parse_reservations(self._reservations),
+            }
+        }
+
+    def translate(self):
+        """Translate the template for the solver."""
+        self._ok = False
+        try:
+            self.create_components()
+            self.validate_components()
+            self.parse_parameters()
+            self.do_translation()
+            self._ok = True
+        except Exception as exc:
+            self._error_message = exc.message
+
    @property
    def valid(self):
        """Returns True if the template has been validated."""
        # Set by validate_components() after all checks pass.
        return self._valid

    @property
    def ok(self):
        """Returns True if the translation was successful."""
        # Set by translate() once do_translation() completes.
        return self._ok

    @property
    def translation(self):
        """Returns the translation if it was successful."""
        # Populated by do_translation(); check .ok before using.
        return self._translation

    @property
    def error_message(self):
        """Returns the last known error message."""
        # Set by translate() when an exception is caught.
        return self._error_message
+
+
def main():
    """Ad-hoc manual test: translate a sample template and print it."""
    template_name = 'some_template'

    path = os.path.abspath(conductor_root)
    dir_path = os.path.dirname(path)

    # Prepare service-wide components (e.g., config)
    conf = service.prepare_service(
        [], config_files=[dir_path + '/../etc/conductor/conductor.conf'])
    # conf.set_override('mock', True, 'music_api')

    # Exercise the threshold parser with one sample of each base unit.
    t1 = threshold.Threshold("< 500 ms", "time")
    t2 = threshold.Threshold("= 120 mi", "distance")
    t3 = threshold.Threshold("160", "currency")
    t4 = threshold.Threshold("60-80 Gbps", "throughput")
    print('t1: {}\nt2: {}\nt3: {}\nt4: {}\n'.format(t1, t2, t3, t4))

    template_file = dir_path + '/tests/data/' + template_name + '.yaml'
    # Use a context manager so the file is closed deterministically
    # (it was previously leaked), and safe_load so arbitrary YAML tags
    # cannot execute code (yaml.load is unsafe on untrusted input).
    with open(template_file, "r") as fd:
        template = yaml.safe_load(fd)

    trns = Translator(conf, template_name, str(uuid.uuid4()), template)
    trns.translate()
    if trns.ok:
        print(json.dumps(trns.translation, indent=2))
    else:
        print("TESTING - Translator Error: {}".format(trns.error_message))


if __name__ == '__main__':
    main()
diff --git a/conductor/conductor/controller/translator_svc.py b/conductor/conductor/controller/translator_svc.py
new file mode 100644 (file)
index 0000000..425ff36
--- /dev/null
@@ -0,0 +1,162 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import time
+
+import cotyledon
+import futurist
+from oslo_config import cfg
+from oslo_log import log
+
+from conductor.common.music import api
+from conductor.common.music import messaging as music_messaging
+from conductor.controller import translator
+from conductor.i18n import _LE, _LI
+from conductor import messaging
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
# oslo.config options for the [controller] section.
CONTROLLER_OPTS = [
    cfg.IntOpt('polling_interval',
               default=1,
               min=1,
               help='Time between checking for new plans. '
                    'Default value is 1.'),
]

# Makes the options available as CONF.controller.*
CONF.register_opts(CONTROLLER_OPTS, group='controller')
+
+
class TranslatorService(cotyledon.Service):
    """Template Translator service.

    This service looks for untranslated templates and
    preps them for solving by the Solver service.
    """

    # This will appear in 'ps xaf'
    name = "Template Translator"

    def __init__(self, worker_id, conf, **kwargs):
        """Initializer

        :param worker_id: worker id assigned by the cotyledon manager
        :param conf: oslo.config configuration object
        :param kwargs: must include 'plan_class', the plan model class
        """
        LOG.debug("%s" % self.__class__.__name__)
        super(TranslatorService, self).__init__(worker_id)
        self._init(conf, **kwargs)
        # Polled by run(); cleared by terminate() to stop the loop.
        self.running = True

    def _init(self, conf, **kwargs):
        # One-time setup shared with __init__; kept separate so a
        # restart could re-run it without re-invoking the base class.
        self.conf = conf
        # Plan model class (capitalized: used as a class, not an instance).
        self.Plan = kwargs.get('plan_class')
        self.kwargs = kwargs

        # Set up the RPC service(s) we want to talk to.
        self.data_service = self.setup_rpc(conf, "data")

        # Set up Music access.
        self.music = api.API()

    def _gracefully_stop(self):
        """Gracefully stop working on things"""
        pass

    def _restart(self):
        """Prepare to restart the service"""
        pass

    def setup_rpc(self, conf, topic):
        """Set up the RPC Client"""
        # TODO(jdandrea): Put this pattern inside music_messaging?
        transport = messaging.get_transport(conf=conf)
        target = music_messaging.Target(topic=topic)
        client = music_messaging.RPCClient(conf=conf,
                                           transport=transport,
                                           target=target)
        return client

    def translate(self, plan):
        """Translate the plan to a format the solver can use"""
        # Update the translation field and set status to TRANSLATED.
        try:
            LOG.info(_LI("Requesting plan {} translation").format(
                plan.id))
            trns = translator.Translator(
                self.conf, plan.name, plan.id, plan.template)
            trns.translate()
            if trns.ok:
                plan.translation = trns.translation
                plan.status = self.Plan.TRANSLATED
                LOG.info(_LI(
                    "Plan {} translated. Ready for solving").format(
                    plan.id))
            else:
                plan.message = trns.error_message
                plan.status = self.Plan.ERROR
                LOG.error(_LE(
                    "Plan {} translation error encountered").format(
                    plan.id))
        except Exception as ex:
            # Translator.translate() is designed not to raise, so this
            # guards against unexpected failures (e.g. bad plan fields).
            template = "An exception of type {0} occurred, arguments:\n{1!r}"
            plan.message = template.format(type(ex).__name__, ex.args)
            plan.status = self.Plan.ERROR

        # Persist the new status/translation/message.
        plan.update()

    def __check_for_templates(self):
        """Wait for the polling interval, then do the real template check."""

        # Wait for at least poll_interval sec
        polling_interval = self.conf.controller.polling_interval
        time.sleep(polling_interval)

        # Look for plans with the status set to TEMPLATE
        plans = self.Plan.query.all()
        for plan in plans:
            # If there's a template to be translated, do it!
            # (At most one per polling cycle: break after the first.)
            if plan.status == self.Plan.TEMPLATE:
                self.translate(plan)
                break
            elif plan.timedout:
                # Move plan to error status? Create a new timed-out status?
                # todo(snarayanan)
                continue

    def run(self):
        """Run"""
        LOG.debug("%s" % self.__class__.__name__)

        # Look for templates to translate from within a thread.
        # One check at a time: fut.result() blocks until it finishes.
        executor = futurist.ThreadPoolExecutor()
        while self.running:
            fut = executor.submit(self.__check_for_templates)
            fut.result()
        executor.shutdown()

    def terminate(self):
        """Terminate"""
        LOG.debug("%s" % self.__class__.__name__)
        self.running = False
        self._gracefully_stop()
        super(TranslatorService, self).terminate()

    def reload(self):
        """Reload"""
        LOG.debug("%s" % self.__class__.__name__)
        self._restart()
diff --git a/conductor/conductor/data/__init__.py b/conductor/conductor/data/__init__.py
new file mode 100644 (file)
index 0000000..9c965aa
--- /dev/null
@@ -0,0 +1,20 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from .service import DataServiceLauncher  # noqa: F401
diff --git a/conductor/conductor/data/plugins/__init__.py b/conductor/conductor/data/plugins/__init__.py
new file mode 100644 (file)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/data/plugins/base.py b/conductor/conductor/data/plugins/base.py
new file mode 100644 (file)
index 0000000..a124e29
--- /dev/null
@@ -0,0 +1,30 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import abc
+
+from oslo_log import log
+import six
+
+LOG = log.getLogger(__name__)
+
+
@six.add_metaclass(abc.ABCMeta)
class DataPlugin(object):
    """Base Data Plugin Class

    Abstract marker base (via the six/abc metaclass) that all conductor
    data plugins derive from; it declares no abstract methods itself.
    """
diff --git a/conductor/conductor/data/plugins/inventory_provider/__init__.py b/conductor/conductor/data/plugins/inventory_provider/__init__.py
new file mode 100644 (file)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/data/plugins/inventory_provider/aai.py b/conductor/conductor/data/plugins/inventory_provider/aai.py
new file mode 100644 (file)
index 0000000..35b4ba7
--- /dev/null
@@ -0,0 +1,1070 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import re
+import time
+import uuid
+
+
+from oslo_config import cfg
+from oslo_log import log
+
+from conductor.common import rest
+from conductor.data.plugins.inventory_provider import base
+from conductor.i18n import _LE, _LI
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
+AAI_OPTS = [
+    cfg.IntOpt('cache_refresh_interval',
+               default=1440,
+               help='Interval with which to refresh the local cache, '
+                    'in minutes.'),
+    cfg.IntOpt('complex_cache_refresh_interval',
+               default=1440,
+               help='Interval with which to refresh the local complex cache, '
+                    'in minutes.'),
+    cfg.StrOpt('table_prefix',
+               default='aai',
+               help='Data Store table prefix.'),
+    cfg.StrOpt('server_url',
+               default='https://controller:8443/aai',
+               help='Base URL for A&AI, up to and not including '
+                    'the version, and without a trailing slash.'),
+    cfg.StrOpt('server_url_version',
+               default='v10',
+               help='The version of A&AI in v# format.'),
+    cfg.StrOpt('certificate_file',
+               default='certificate.pem',
+               help='SSL/TLS certificate file in pem format. '
+                    'This certificate must be registered with the A&AI '
+                    'endpoint.'),
+    cfg.StrOpt('certificate_key_file',
+               default='certificate_key.pem',
+               help='Private Certificate Key file in pem format.'),
+    cfg.StrOpt('certificate_authority_bundle_file',
+               default='certificate_authority_bundle.pem',
+               help='Certificate Authority Bundle file in pem format. '
+                    'Must contain the appropriate trust chain for the '
+                    'Certificate file.'),
+]
+
+CONF.register_opts(AAI_OPTS, group='aai')
+
+
+class AAI(base.InventoryProviderBase):
+    """Active and Available Inventory Provider"""
+
+    def __init__(self):
+        """Initializer"""
+
+        # FIXME(jdandrea): Pass this in to init.
+        self.conf = CONF
+
+        self.base = self.conf.aai.server_url.rstrip('/')
+        self.version = self.conf.aai.server_url_version.rstrip('/')
+        self.cert = self.conf.aai.certificate_file
+        self.key = self.conf.aai.certificate_key_file
+        self.verify = self.conf.aai.certificate_authority_bundle_file
+        self.cache_refresh_interval = self.conf.aai.cache_refresh_interval
+        self.last_refresh_time = None
+        self.complex_cache_refresh_interval = \
+            self.conf.aai.complex_cache_refresh_interval
+        self.complex_last_refresh_time = None
+
+        # TODO(jdandrea): Make these config options?
+        self.timeout = 30
+        self.retries = 3
+
+        kwargs = {
+            "server_url": self.base,
+            "retries": self.retries,
+            "cert_file": self.cert,
+            "cert_key_file": self.key,
+            "ca_bundle_file": self.verify,
+            "log_debug": self.conf.debug,
+        }
+        self.rest = rest.REST(**kwargs)
+
+        # Cache is initially empty
+        self._aai_cache = {}
+        self._aai_complex_cache = {}
+
+    def initialize(self):
+        """Perform any late initialization."""
+
+        # Refresh the cache once for now
+        self._refresh_cache()
+
+        # TODO(jdandrea): Make this periodic, and without a True condition!
+        # executor = futurist.ThreadPoolExecutor()
+        # while True:
+        #     fut = executor.submit(self.refresh_cache)
+        #     fut.result()
+        #
+        #     # Now wait for the next time.
+        #     # FIXME(jdandrea): Put inside refresh_cache()?
+        #     refresh_interval = self.conf.aai.cache_refresh_interval
+        #     time.sleep(refresh_interval)
+        # executor.shutdown()
+
+    def name(self):
+        """Return human-readable name."""
+        return "A&AI"
+
+    def _get_version_from_string(self, string):
+        """Extract version number from string"""
+        return re.sub("[^0-9.]", "", string)
+
+    def _aai_versioned_path(self, path):
+        """Return a URL path with the A&AI version prepended"""
+        return '/{}/{}'.format(self.version, path.lstrip('/'))
+
+    def _request(self, method='get', path='/', data=None,
+                 context=None, value=None):
+        """Performs HTTP request."""
+        headers = {
+            'X-FromAppId': 'CONDUCTOR',
+            'X-TransactionId': str(uuid.uuid4()),
+        }
+        kwargs = {
+            "method": method,
+            "path": path,
+            "headers": headers,
+            "data": data,
+        }
+
+        # TODO(jdandrea): Move timing/response logging into the rest helper?
+        start_time = time.time()
+        response = self.rest.request(**kwargs)
+        elapsed = time.time() - start_time
+        LOG.debug("Total time for A&AI request "
+                  "({0:}: {1:}): {2:.3f} sec".format(context, value, elapsed))
+
+        if response is None:
+            LOG.error(_LE("No response from A&AI ({}: {})").
+                      format(context, value))
+        elif response.status_code != 200:
+            LOG.error(_LE("A&AI request ({}: {}) returned HTTP "
+                          "status {} {}, link: {}{}").
+                      format(context, value,
+                             response.status_code, response.reason,
+                             self.base, path))
+        return response
+
    def _refresh_cache(self):
        """Refresh the A&AI cloud-region cache if its interval has elapsed.

        Pulls all cloud regions (depth=0), resolves each region's complex
        for geographic data, and rebuilds self._aai_cache wholesale. A
        region missing required fields is skipped with an error logged.
        """
        # Only refresh when there is no cache yet or it has aged past
        # cache_refresh_interval (minutes -> seconds).
        if not self.last_refresh_time or \
            (time.time() - self.last_refresh_time) > \
                self.cache_refresh_interval * 60:
            # TODO(snarayanan):
            # The cache is not persisted to Music currently.
            # A general purpose ORM caching
            # object likely needs to be made, with a key (hopefully we
            # can use one that is not just a UUID), a value, and a
            # timestamp. The other alternative is to not use the ORM
            # layer and call the API directly, but that is
            # also trading one set of todos for another ...

            # Get all A&AI sites
            LOG.info(_LI("**** Refreshing A&AI cache *****"))
            path = self._aai_versioned_path(
                '/cloud-infrastructure/cloud-regions/?depth=0')
            response = self._request(
                path=path, context="cloud regions", value="all")
            if response is None:
                return
            regions = {}
            if response.status_code == 200:
                body = response.json()
                regions = body.get('cloud-region', {})
            if not regions:
                # Nothing to update the cache with
                LOG.error(_LE("A&AI returned no regions, link: {}{}").
                          format(self.base, path))
                return
            # Build a fresh cache; the old one is swapped out atomically
            # at the end so readers never see a half-built cache.
            cache = {
                'cloud_region': {},
                'service': {},
            }
            for region in regions:
                cloud_region_version = region.get('cloud-region-version')
                cloud_region_id = region.get('cloud-region-id')
                cloud_owner = region.get('cloud-owner')
                # Version and id are mandatory; skip regions without them.
                if not (cloud_region_version and
                        cloud_region_id):
                    continue
                # A region must reference exactly one complex.
                rel_link_data_list = \
                    self._get_aai_rel_link_data(
                        data=region,
                        related_to='complex',
                        search_key='complex.physical-location-id')
                if len(rel_link_data_list) > 1:
                    LOG.error(_LE("Region {} has more than one complex").
                              format(cloud_region_id))
                    LOG.debug("Region {}: {}".format(cloud_region_id, region))
                    continue
                rel_link_data = rel_link_data_list[0]
                complex_id = rel_link_data.get("d_value")
                complex_link = rel_link_data.get("link")
                if complex_id and complex_link:
                    complex_info = self._get_complex(
                        complex_link=complex_link,
                        complex_id=complex_id)
                else:  # no complex information
                    LOG.error(_LE("Region {} does not reference a complex").
                              format(cloud_region_id))
                    continue
                if not complex_info:
                    LOG.error(_LE("Region {}, complex {} info not found, "
                                  "link {}").format(cloud_region_id,
                                                    complex_id, complex_link))
                    continue

                latitude = complex_info.get('latitude')
                longitude = complex_info.get('longitude')
                complex_name = complex_info.get('complex-name')
                city = complex_info.get('city')
                state = complex_info.get('state')
                # NOTE(review): this rebinds the loop variable `region` to
                # the complex's region string. Safe only because the region
                # dict is not read again in this iteration — confirm before
                # adding code below that expects the original dict.
                region = complex_info.get('region')
                country = complex_info.get('country')
                # `state` is deliberately optional: it is cached but not
                # part of the required-field check below.
                if not (complex_name and latitude and longitude
                        and city and region and country):
                    keys = ('latitude', 'longitude', 'city',
                            'complex-name', 'region', 'country')
                    missing_keys = \
                        list(set(keys).difference(complex_info.keys()))
                    LOG.error(_LE("Complex {} is missing {}, link: {}").
                              format(complex_id, missing_keys, complex_link))
                    LOG.debug("Complex {}: {}".
                              format(complex_id, complex_info))
                    continue
                cache['cloud_region'][cloud_region_id] = {
                    'cloud_region_version': cloud_region_version,
                    'cloud_owner': cloud_owner,
                    'complex': {
                        'complex_id': complex_id,
                        'complex_name': complex_name,
                        'latitude': latitude,
                        'longitude': longitude,
                        'city': city,
                        'state': state,
                        'region': region,
                        'country': country,
                    }
                }
            self._aai_cache = cache
            self.last_refresh_time = time.time()
            LOG.info(_LI("**** A&AI cache refresh complete *****"))
+
+    # Helper functions to parse the relationships that
+    # AAI uses to tie information together. This should ideally be
+    # handled with libraries built for graph databases. Needs more
+    # exploration for such libraries.
+    @staticmethod
+    def _get_aai_rel_link(data, related_to):
+        """Given an A&AI data structure, return the related-to link"""
+        rel_dict = data.get('relationship-list')
+        if rel_dict:
+            for key, rel_list in rel_dict.items():
+                for rel in rel_list:
+                    if related_to == rel.get('related-to'):
+                        return rel.get('related-link')
+
    @staticmethod
    def _get_aai_rel_link_data(data, related_to, search_key=None,
                               match_dict=None):
        """Extract relationship links (and optionally data values) from *data*.

        For every relationship of kind *related_to*:
        - with no *search_key*: collect just the related-link;
        - with *search_key*: collect the link plus the first matching
          relationship-value;
        - with *match_dict* too: collect only relationships that also
          contain the given key/value pair.

        Always returns a non-empty list of {"link": ..., "d_value": ...}
        dicts; when nothing matched, a single {None, None} entry is
        returned so callers can index [0] safely.
        """
        # some strings that we will encounter frequently
        rel_lst = "relationship-list"
        rkey = "relationship-key"
        rval = "relationship-value"
        rdata = "relationship-data"
        response = list()
        if match_dict:
            m_key = match_dict.get('key')
            m_value = match_dict.get('value')
        else:
            m_key = None
            m_value = None
        rel_dict = data.get(rel_lst)
        if rel_dict:  # check if data has relationship lists
            for key, rel_list in rel_dict.items():
                for rel in rel_list:
                    if rel.get("related-to") == related_to:
                        dval = None
                        matched = False
                        link = rel.get("related-link")
                        r_data = rel.get(rdata, [])
                        if search_key:
                            for rd in r_data:
                                if rd.get(rkey) == search_key:
                                    dval = rd.get(rval)
                                    if not match_dict:  # return first match
                                        response.append(
                                            {"link": link, "d_value": dval}
                                        )
                                        break  # go to next relation
                                # NOTE: only reached when match_dict is set
                                # (the break above fires otherwise).
                                if rd.get(rkey) == m_key \
                                        and rd.get(rval) == m_value:
                                    matched = True
                            if match_dict and matched:  # if matching required
                                response.append(
                                    {"link": link, "d_value": dval}
                                )
                                # matched, return search value corresponding
                                # to the matched r_data group
                        else:  # no search key; just return the link
                            response.append(
                                {"link": link, "d_value": dval}
                            )
        # Guarantee at least one entry so callers may index [0].
        if len(response) == 0:
            response.append(
                {"link": None, "d_value": None}
            )
        return response
+
+    def _get_complex(self, complex_link, complex_id=None):
+        if not self.complex_last_refresh_time or \
+           (time.time() - self.complex_last_refresh_time) > \
+           self.complex_cache_refresh_interval * 60:
+            self._aai_complex_cache.clear()
+        if complex_id and complex_id in self._aai_complex_cache:
+            return self._aai_complex_cache[complex_id]
+        else:
+            path = self._aai_versioned_path(
+                self._get_aai_path_from_link(complex_link))
+            response = self._request(
+                path=path, context="complex", value=complex_id)
+            if response is None:
+                return
+            if response.status_code == 200:
+                complex_info = response.json()
+                if 'complex' in complex_info:
+                    complex_info = complex_info.get('complex')
+                latitude = complex_info.get('latitude')
+                longitude = complex_info.get('longitude')
+                complex_name = complex_info.get('complex-name')
+                city = complex_info.get('city')
+                region = complex_info.get('region')
+                country = complex_info.get('country')
+                if not (complex_name and latitude and longitude
+                        and city and region and country):
+                    keys = ('latitude', 'longitude', 'city',
+                            'complex-name', 'region', 'country')
+                    missing_keys = \
+                        list(set(keys).difference(complex_info.keys()))
+                    LOG.error(_LE("Complex {} is missing {}, link: {}").
+                              format(complex_id, missing_keys, complex_link))
+                    LOG.debug("Complex {}: {}".
+                              format(complex_id, complex_info))
+                    return
+
+                if complex_id:  # cache only if complex_id is given
+                    self._aai_complex_cache[complex_id] = response.json()
+                    self.complex_last_refresh_time = time.time()
+
+                return complex_info
+
+    def _get_regions(self):
+        self._refresh_cache()
+        regions = self._aai_cache.get('cloud_region', {})
+        return regions
+
+    def _get_aai_path_from_link(self, link):
+        path = link.split(self.version)
+        if not path or len(path) <= 1:
+            # TODO(shankar): Treat this as a critical error?
+            LOG.error(_LE("A&AI version {} not found in link {}").
+                      format(self.version, link))
+        else:
+            return "{}?depth=0".format(path[1])
+
    def check_network_roles(self, network_role_id=None):
        """Return the set of cloud-region ids hosting l3-networks
        with the given network role.

        Returns None when A&AI gives no response at all.
        """
        # the network role query from A&AI is not using
        # the version number in the query
        # NOTE(review): despite the comment above, the path IS run
        # through _aai_versioned_path below — confirm which is intended.
        network_role_uri = \
            '/network/l3-networks?network-role=' + network_role_id
        path = self._aai_versioned_path(network_role_uri)
        network_role_id = network_role_id  # no-op self-assignment (kept as-is)

        # This UUID is usually reserved by A&AI for a Conductor-specific named query.
        # NOTE(review): the UUID is empty here, so the named-query body
        # below is effectively inert — verify against the A&AI deployment.
        named_query_uid = ""

        # Named-query style request body; note it is sent with a GET.
        data = {
            "query-parameters": {
                "named-query": {
                    "named-query-uuid": named_query_uid
                }
            },
            "instance-filters": {
                "instance-filter": [
                    {
                        "l3-network": {
                            "network-role": network_role_id
                        }
                    }
                ]
            }
        }
        region_ids = set()
        response = self._request('get', path=path, data=data,
                                 context="role", value=network_role_id)
        if response is None:
            return None
        body = response.json()

        response_items = body.get('l3-network', [])

        # Collect every cloud-region id related to a matching network.
        for item in response_items:
            cloud_region_instances = self._get_aai_rel_link_data(
                data=item,
                related_to='cloud-region',
                search_key='cloud-region.cloud-region-id'
            )

            if len(cloud_region_instances) > 0:
                for r_instance in cloud_region_instances:
                    region_id = r_instance.get('d_value')
                    if region_id is not None:
                        region_ids.add(region_id)

        # return region ids that fit the role
        return region_ids
+
+    def resolve_host_location(self, host_name):
+        path = self._aai_versioned_path('/query?format=id')
+        data = {"start": ["network/pnfs/pnf/" + host_name,
+                "cloud-infrastructure/pservers/pserver/" + host_name],
+                "query": "query/ucpe-instance"
+                }
+        response = self._request('put', path=path, data=data,
+                                 context="host name", value=host_name)
+        if response is None or response.status_code != 200:
+            return None
+        body = response.json()
+        results = body.get('results', [])
+        complex_link = None
+        for result in results:
+            if "resource-type" in result and \
+                    "resource-link" in result and \
+                    result["resource-type"] == "complex":
+                complex_link = result["resource-link"]
+        if not complex_link:
+            LOG.error(_LE("Unable to get a complex link for hostname {} "
+                          " in response {}").format(host_name, response))
+            return None
+        complex_info = self._get_complex(
+            complex_link=complex_link,
+            complex_id=None
+        )
+        if complex_info:
+            lat = complex_info.get('latitude')
+            lon = complex_info.get('longitude')
+            if lat and lon:
+                location = {"latitude": lat, "longitude": lon}
+                return location
+            else:
+                LOG.error(_LE("Unable to get a latitude and longitude "
+                              "information for hostname {} from complex "
+                              " link {}").format(host_name, complex_link))
+                return None
+        else:
+            LOG.error(_LE("Unable to get a complex information for "
+                          " hostname {} from complex "
+                          " link {}").format(host_name, complex_link))
+            return None
+
+    def resolve_clli_location(self, clli_name):
+        clli_uri = '/cloud-infrastructure/complexes/complex/' + clli_name
+        path = self._aai_versioned_path(clli_uri)
+
+        response = self._request('get', path=path, data=None,
+                                 context="clli name", value=clli_name)
+        if response is None or response.status_code != 200:
+            return None
+
+        body = response.json()
+
+        if body:
+            lat = body.get('latitude')
+            lon = body.get('longitude')
+            if lat and lon:
+                location = {"latitude": lat, "longitude": lon}
+                return location
+            else:
+                LOG.error(_LE("Unable to get a latitude and longitude "
+                              "information for CLLI code {} from complex").
+                          format(clli_name))
+            return None
+
+    def get_inventory_group_pairs(self, service_description):
+        pairs = list()
+        path = self._aai_versioned_path(
+            '/network/instance-groups/?description={}&depth=0'.format(
+                service_description))
+        response = self._request(path=path, context="inventory group",
+                                 value=service_description)
+        if response is None or response.status_code != 200:
+            return
+        body = response.json()
+        if "instance-group" not in body:
+            LOG.error(_LE("Unable to get instance groups from inventory "
+                          " in response {}").format(response))
+            return
+        for instance_groups in body["instance-group"]:
+            s_instances = self._get_aai_rel_link_data(
+                data=instance_groups,
+                related_to='service-instance',
+                search_key='service-instance.service-instance-id'
+            )
+            if s_instances and len(s_instances) == 2:
+                pair = list()
+                for s_inst in s_instances:
+                    pair.append(s_inst.get('d_value'))
+                pairs.append(pair)
+            else:
+                LOG.error(_LE("Number of instance pairs not found to "
+                              "be two: {}").format(instance_groups))
+        return pairs
+
    def _log_multiple_item_error(self, name, service_type,
                                 related_to, search_key='',
                                 context=None, value=None):
        """Helper method to log multiple-item errors

        Logs an error for a demand that unexpectedly resolved to more
        than one related item; emits extra debug detail only when both
        *context* and *value* are supplied. Used by resolve_demands.
        """
        LOG.error(_LE("Demand {}, role {} has more than one {} ({})").
                  format(name, service_type, related_to, search_key))
        if context and value:
            LOG.debug("{} details: {}".format(context, value))
+
+    def check_sriov_automation(self, aic_version, demand_name, candidate_name):
+
+        """Check if specific candidate has SRIOV automation available or not
+
+        Used by resolve_demands
+        """
+
+        if aic_version:
+            LOG.debug(_LI("Demand {}, candidate {} has an AIC version "
+                          "number {}").format(demand_name, candidate_name,
+                                              aic_version)
+                      )
+            if aic_version == "3.6":
+                return True
+        return False
+
+    def check_orchestration_status(self, orchestration_status, demand_name, candidate_name):
+
+        """Check if the orchestration-status of a candidate is activated
+
+        Used by resolve_demands
+        """
+
+        if orchestration_status:
+            LOG.debug(_LI("Demand {}, candidate {} has an orchestration "
+                          "status {}").format(demand_name, candidate_name,
+                                              orchestration_status))
+            if orchestration_status.lower() == "activated":
+                return True
+        return False
+
+    def match_candidate_attribute(self, candidate, attribute_name,
+                                  restricted_value, demand_name,
+                                  inventory_type):
+        """Check if specific candidate attribute matches the restricted value
+
+        Used by resolve_demands
+        """
+        if restricted_value and \
+           restricted_value is not '' and \
+           candidate[attribute_name] != restricted_value:
+            LOG.info(_LI("Demand: {} "
+                         "Discarded {} candidate as "
+                         "it doesn't match the "
+                         "{} attribute "
+                         "{} ").format(demand_name,
+                                       inventory_type,
+                                       attribute_name,
+                                       restricted_value
+                                       )
+                     )
+            return True
+        return False
+
+    def match_vserver_attribute(self, vserver_list):
+
+        value = None
+        for i in range(0, len(vserver_list)):
+            if value and \
+               value != vserver_list[i].get('d_value'):
+                return False
+            value = vserver_list[i].get('d_value')
+        return True
+
+    def resolve_demands(self, demands):
+        """Resolve demands into inventory candidate lists.
+
+        :param demands: dict mapping a demand name to a list of
+            requirement dicts (inventory_type, service_type,
+            customer_id, optional region/complex restrictions,
+            required/excluded candidate lists, service_resource_id)
+        :return: dict mapping each demand name to a list of candidate
+            dicts built from A&AI data (or None on one error path —
+            see NOTE(review) below)
+        """
+
+        resolved_demands = {}
+        for name, requirements in demands.items():
+            resolved_demands[name] = []
+            for requirement in requirements:
+                # NOTE(review): assumes 'inventory_type' is always present;
+                # a missing key would make .lower() raise AttributeError —
+                # confirm upstream validation guarantees it.
+                inventory_type = requirement.get('inventory_type').lower()
+                service_type = requirement.get('service_type')
+                # service_id = requirement.get('service_id')
+                customer_id = requirement.get('customer_id')
+
+                # region_id is OPTIONAL. This will restrict the initial
+                # candidate set to come from the given region id
+                restricted_region_id = requirement.get('region')
+                restricted_complex_id = requirement.get('complex')
+
+                # get required candidates from the demand
+                required_candidates = requirement.get("required_candidates")
+                if required_candidates:
+                    # NOTE(review): this stores the list under the literal
+                    # key 'required_candidates' in the same dict that is
+                    # keyed by demand names — a demand actually named
+                    # 'required_candidates' would be clobbered, and later
+                    # requirements overwrite earlier ones. Confirm intended.
+                    resolved_demands['required_candidates'] = \
+                        required_candidates
+
+                # get excluded candidate from the demand
+                excluded_candidates = requirement.get("excluded_candidates")
+
+                # service_resource_id is OPTIONAL and is
+                # transparent to Conductor
+                service_resource_id = requirement.get('service_resource_id') \
+                    if requirement.get('service_resource_id') else ''
+
+                # add all the candidates of cloud type
+                if inventory_type == 'cloud':
+                    # load region candidates from cache
+                    regions = self._get_regions()
+
+                    # NOTE(review): this only logs when the cache is empty;
+                    # if _get_regions() can return None the .items() call
+                    # below would raise AttributeError — confirm.
+                    if not regions or len(regions) < 1:
+                        LOG.debug("Region information is not "
+                                  "available in cache")
+                    for region_id, region in regions.items():
+                        # Pick only candidates from the restricted_region
+
+                        # Build a candidate dict describing this cloud
+                        # region; keys mirror the service-candidate shape.
+                        candidate = dict()
+                        candidate['inventory_provider'] = 'aai'
+                        candidate['service_resource_id'] = service_resource_id
+                        candidate['inventory_type'] = 'cloud'
+                        candidate['candidate_id'] = region_id
+                        candidate['location_id'] = region_id
+                        candidate['location_type'] = 'att_aic'
+                        candidate['cost'] = 0
+                        candidate['cloud_region_version'] = \
+                            self._get_version_from_string(
+                                region['cloud_region_version'])
+                        candidate['cloud_owner'] = \
+                            region['cloud_owner']
+                        candidate['physical_location_id'] = \
+                            region['complex']['complex_id']
+                        candidate['complex_name'] = \
+                            region['complex']['complex_name']
+                        candidate['latitude'] = \
+                            region['complex']['latitude']
+                        candidate['longitude'] = \
+                            region['complex']['longitude']
+                        candidate['city'] = \
+                            region['complex']['city']
+                        candidate['state'] = \
+                            region['complex']['state']
+                        candidate['region'] = \
+                            region['complex']['region']
+                        candidate['country'] = \
+                            region['complex']['country']
+
+                        # SR-IOV automation eligibility depends on the
+                        # region's AIC version (see check_sriov_automation).
+                        if self.check_sriov_automation(
+                                candidate['cloud_region_version'], name,
+                                candidate['candidate_id']):
+                            candidate['sriov_automation'] = 'true'
+                        else:
+                            candidate['sriov_automation'] = 'false'
+
+                        # Skip candidates outside the restricted region or
+                        # complex (a truthy return means "does not match").
+                        if self.match_candidate_attribute(
+                                candidate, "candidate_id",
+                                restricted_region_id, name,
+                                inventory_type) or \
+                           self.match_candidate_attribute(
+                               candidate, "physical_location_id",
+                               restricted_complex_id, name,
+                               inventory_type):
+                            continue
+
+                        # Pick only candidates not in the excluded list
+                        # if excluded candidate list is provided
+                        if excluded_candidates:
+                            has_excluded_candidate = False
+                            for excluded_candidate in excluded_candidates:
+                                if excluded_candidate \
+                                   and excluded_candidate.get('inventory_type') == \
+                                   candidate.get('inventory_type') \
+                                   and excluded_candidate.get('candidate_id') == \
+                                   candidate.get('candidate_id'):
+                                    has_excluded_candidate = True
+                                    break
+
+                            if has_excluded_candidate:
+                                continue
+
+                        # Pick only candidates in the required list
+                        # if required candidate list is provided
+                        if required_candidates:
+                            has_required_candidate = False
+                            for required_candidate in required_candidates:
+                                if required_candidate \
+                                   and required_candidate.get('inventory_type') \
+                                   == candidate.get('inventory_type') \
+                                   and required_candidate.get('candidate_id') \
+                                   == candidate.get('candidate_id'):
+                                    has_required_candidate = True
+                                    break
+
+                            if not has_required_candidate:
+                                continue
+
+                        # add candidate to demand candidates
+                        resolved_demands[name].append(candidate)
+
+                elif inventory_type == 'service' \
+                        and service_type and customer_id:
+                    # First level query to get the list of generic vnfs
+                    path = self._aai_versioned_path(
+                        '/network/generic-vnfs/'
+                        '?prov-status=PROV&equipment-role={}&depth=0'.format(service_type))
+                    response = self._request(
+                        path=path, context="demand, GENERIC-VNF role",
+                        value="{}, {}".format(name, service_type))
+                    if response is None or response.status_code != 200:
+                        continue  # move ahead with next requirement
+                    body = response.json()
+                    generic_vnf = body.get("generic-vnf", [])
+                    for vnf in generic_vnf:
+                        # create a default candidate
+                        candidate = dict()
+                        candidate['inventory_provider'] = 'aai'
+                        candidate['service_resource_id'] = service_resource_id
+                        candidate['inventory_type'] = 'service'
+                        candidate['candidate_id'] = ''
+                        candidate['location_id'] = ''
+                        candidate['location_type'] = 'att_aic'
+                        candidate['host_id'] = ''
+                        candidate['cost'] = 0
+                        candidate['cloud_owner'] = ''
+                        candidate['cloud_region_version'] = ''
+
+                        # start populating the candidate
+                        candidate['host_id'] = vnf.get("vnf-name")
+
+                        # check orchestration-status attribute, only keep Activated candidate
+                        if (not self.check_orchestration_status(
+                                vnf.get("orchestration-status"), name, candidate['host_id'])):
+                            continue
+
+                        # Pull the cloud owner from the VNF's vserver
+                        # relationship links.
+                        related_to = "vserver"
+                        search_key = "cloud-region.cloud-owner"
+                        rl_data_list = self._get_aai_rel_link_data(
+                            data=vnf, related_to=related_to,
+                            search_key=search_key)
+
+                        if len(rl_data_list) > 1:
+                            if not self.match_vserver_attribute(rl_data_list):
+                                self._log_multiple_item_error(
+                                    name, service_type, related_to, search_key,
+                                    "GENERIC-VNF", vnf)
+                                continue
+                        # NOTE(review): assumes the helper returned at least
+                        # one entry; an empty list would raise IndexError —
+                        # confirm _get_aai_rel_link_data's contract.
+                        rl_data = rl_data_list[0]
+
+                        vs_link_list = list()
+                        for i in range(0, len(rl_data_list)):
+                            vs_link_list.append(rl_data_list[i].get('link'))
+
+                        candidate['cloud_owner'] = rl_data.get('d_value')
+
+                        search_key = "cloud-region.cloud-region-id"
+
+                        rl_data_list = self._get_aai_rel_link_data(
+                            data=vnf,
+                            related_to=related_to,
+                            search_key=search_key
+                        )
+                        if len(rl_data_list) > 1:
+                            if not self.match_vserver_attribute(rl_data_list):
+                                self._log_multiple_item_error(
+                                    name, service_type, related_to, search_key,
+                                    "GENERIC-VNF", vnf)
+                                continue
+                        rl_data = rl_data_list[0]
+                        cloud_region_id = rl_data.get('d_value')
+                        candidate['location_id'] = cloud_region_id
+
+                        # get AIC version for service candidate
+                        if cloud_region_id:
+                            cloud_region_uri = '/cloud-infrastructure/cloud-regions' \
+                                               '/?cloud-region-id=' \
+                                               + cloud_region_id
+                            path = self._aai_versioned_path(cloud_region_uri)
+
+                            # NOTE(review): this _request call passes a
+                            # positional 'get' and data=None, unlike the
+                            # keyword-only style used elsewhere in this
+                            # method — confirm it matches the signature.
+                            response = self._request('get',
+                                                     path=path,
+                                                     data=None)
+                            # NOTE(review): this aborts the ENTIRE
+                            # resolution (returns None, discarding all
+                            # demands resolved so far) instead of skipping
+                            # this VNF like every other error path here —
+                            # looks unintended; confirm callers tolerate a
+                            # None return.
+                            if response is None or response.status_code != 200:
+                                return None
+
+                            body = response.json()
+                            regions = body.get('cloud-region', [])
+
+                            # The last region with a version wins if several
+                            # are returned.
+                            for region in regions:
+                                if "cloud-region-version" in region:
+                                    candidate['cloud_region_version'] = \
+                                        self._get_version_from_string(
+                                            region["cloud-region-version"])
+
+                        if self.check_sriov_automation(
+                                candidate['cloud_region_version'], name,
+                                candidate['host_id']):
+                            candidate['sriov_automation'] = 'true'
+                        else:
+                            candidate['sriov_automation'] = 'false'
+
+                        # Resolve the owning customer via the
+                        # service-instance relationship, filtered to the
+                        # requested customer_id.
+                        related_to = "service-instance"
+                        search_key = "customer.global-customer-id"
+                        match_key = "customer.global-customer-id"
+                        rl_data_list = self._get_aai_rel_link_data(
+                            data=vnf,
+                            related_to=related_to,
+                            search_key=search_key,
+                            match_dict={'key': match_key,
+                                        'value': customer_id}
+                        )
+                        if len(rl_data_list) > 1:
+                            if not self.match_vserver_attribute(rl_data_list):
+                                self._log_multiple_item_error(
+                                    name, service_type, related_to, search_key,
+                                    "GENERIC-VNF", vnf)
+                                continue
+                        rl_data = rl_data_list[0]
+                        vs_cust_id = rl_data.get('d_value')
+
+                        # Resolve the service-instance id for the same
+                        # customer; it becomes the candidate_id below.
+                        search_key = "service-instance.service-instance-id"
+                        match_key = "customer.global-customer-id"
+                        rl_data_list = self._get_aai_rel_link_data(
+                            data=vnf,
+                            related_to=related_to,
+                            search_key=search_key,
+                            match_dict={'key': match_key,
+                                        'value': customer_id}
+                        )
+                        if len(rl_data_list) > 1:
+                            if not self.match_vserver_attribute(rl_data_list):
+                                self._log_multiple_item_error(
+                                    name, service_type, related_to, search_key,
+                                    "GENERIC-VNF", vnf)
+                                continue
+                        rl_data = rl_data_list[0]
+                        vs_service_instance_id = rl_data.get('d_value')
+
+                        if vs_cust_id and vs_cust_id == customer_id:
+                            candidate['candidate_id'] = \
+                                vs_service_instance_id
+                        else:  # vserver is for a different customer
+                            continue
+
+                        # Second level query to get the pserver from vserver
+                        complex_list = list()
+
+                        for vs_link in vs_link_list:
+
+                            if not vs_link:
+                                LOG.error(_LE("{} VSERVER link information not "
+                                              "available from A&AI").format(name))
+                                LOG.debug("Related link data: {}".format(rl_data))
+                                continue  # move ahead with the next vnf
+
+                            vs_path = self._get_aai_path_from_link(vs_link)
+                            if not vs_path:
+                                LOG.error(_LE("{} VSERVER path information not "
+                                              "available from A&AI - {}").
+                                          format(name, vs_path))
+                                continue  # move ahead with the next vnf
+                            path = self._aai_versioned_path(vs_path)
+                            response = self._request(
+                                path=path, context="demand, VSERVER",
+                                value="{}, {}".format(name, vs_path))
+                            if response is None or response.status_code != 200:
+                                continue
+                            body = response.json()
+
+                            related_to = "pserver"
+                            rl_data_list = self._get_aai_rel_link_data(
+                                data=body,
+                                related_to=related_to,
+                                search_key=None
+                            )
+                            if len(rl_data_list) > 1:
+                                self._log_multiple_item_error(
+                                    name, service_type, related_to, "item",
+                                    "VSERVER", body)
+                                continue
+                            rl_data = rl_data_list[0]
+                            ps_link = rl_data.get('link')
+
+                            # Third level query to get cloud region from pserver
+                            if not ps_link:
+                                LOG.error(_LE("{} pserver related link "
+                                              "not found in A&AI: {}").
+                                          format(name, rl_data))
+                                continue
+                            ps_path = self._get_aai_path_from_link(ps_link)
+                            if not ps_path:
+                                LOG.error(_LE("{} pserver path information "
+                                              "not found in A&AI: {}").
+                                          format(name, ps_link))
+                                continue  # move ahead with the next vnf
+                            path = self._aai_versioned_path(ps_path)
+                            response = self._request(
+                                path=path, context="PSERVER", value=ps_path)
+                            if response is None or response.status_code != 200:
+                                continue
+                            body = response.json()
+
+                            # Collect the complex (physical location)
+                            # reachable from this pserver.
+                            related_to = "complex"
+                            search_key = "complex.physical-location-id"
+                            rl_data_list = self._get_aai_rel_link_data(
+                                data=body,
+                                related_to=related_to,
+                                search_key=search_key
+                            )
+                            if len(rl_data_list) > 1:
+                                if not self.match_vserver_attribute(rl_data_list):
+                                    self._log_multiple_item_error(
+                                        name, service_type, related_to, search_key,
+                                        "PSERVER", body)
+                                    continue
+                            rl_data = rl_data_list[0]
+                            complex_list.append(rl_data)
+
+                        if not complex_list or \
+                            len(complex_list) < 1:
+                            LOG.error("Complex information not "
+                                          "available from A&AI")
+                            continue
+
+                        # All vservers must agree on the complex, otherwise
+                        # the VNF is ambiguous and is skipped.
+                        if len(complex_list) > 1:
+                            if not self.match_vserver_attribute(complex_list):
+                                self._log_multiple_item_error(
+                                    name, service_type, related_to, search_key,
+                                    "GENERIC-VNF", vnf)
+                                continue
+
+                        rl_data = complex_list[0]
+                        complex_link = rl_data.get('link')
+                        complex_id = rl_data.get('d_value')
+
+                        # Final query for the complex information
+                        if not (complex_link and complex_id):
+                            LOG.debug("{} complex information not "
+                                      "available from A&AI - {}".
+                                      format(name, complex_link))
+                            continue  # move ahead with the next vnf
+                        else:
+                            complex_info = self._get_complex(
+                                complex_link=complex_link,
+                                complex_id=complex_id
+                            )
+                            if not complex_info:
+                                LOG.debug("{} complex information not "
+                                          "available from A&AI - {}".
+                                          format(name, complex_link))
+                                continue  # move ahead with the next vnf
+                            candidate['physical_location_id'] = \
+                                complex_id
+                            candidate['complex_name'] = \
+                                complex_info.get('complex-name')
+                            candidate['latitude'] = \
+                                complex_info.get('latitude')
+                            candidate['longitude'] = \
+                                complex_info.get('longitude')
+                            candidate['state'] = \
+                                complex_info.get('state')
+                            candidate['country'] = \
+                                complex_info.get('country')
+                            candidate['city'] = \
+                                complex_info.get('city')
+                            candidate['region'] = \
+                                complex_info.get('region')
+
+                        # Pick only candidates not in the excluded list
+                        # if excluded candidate list is provided
+                        if excluded_candidates:
+                            has_excluded_candidate = False
+                            for excluded_candidate in excluded_candidates:
+                                if excluded_candidate \
+                                        and excluded_candidate.get('inventory_type') == \
+                                        candidate.get('inventory_type') \
+                                        and excluded_candidate.get('candidate_id') == \
+                                        candidate.get('candidate_id'):
+                                    has_excluded_candidate = True
+                                    break
+
+                            if has_excluded_candidate:
+                                continue
+
+                        # Pick only candidates in the required list
+                        # if required candidate list is provided
+                        if required_candidates:
+                            has_required_candidate = False
+                            for required_candidate in required_candidates:
+                                if required_candidate \
+                                        and required_candidate.get('inventory_type') \
+                                        == candidate.get('inventory_type') \
+                                        and required_candidate.get('candidate_id') \
+                                        == candidate.get('candidate_id'):
+                                    has_required_candidate = True
+                                    break
+
+                            if not has_required_candidate:
+                                continue
+
+                        # add the candidate to the demand
+                        # Pick only candidates from the restricted_region
+                        # or restricted_complex
+                        if self.match_candidate_attribute(
+                                candidate,
+                                "location_id",
+                                restricted_region_id,
+                                name,
+                                inventory_type) or \
+                           self.match_candidate_attribute(
+                               candidate,
+                               "physical_location_id",
+                               restricted_complex_id,
+                               name,
+                               inventory_type):
+                            continue
+                        else:
+                            resolved_demands[name].append(candidate)
+                else:
+                    LOG.error("Unknown inventory_type "
+                              " {}".format(inventory_type))
+
+        return resolved_demands
diff --git a/conductor/conductor/data/plugins/inventory_provider/base.py b/conductor/conductor/data/plugins/inventory_provider/base.py
new file mode 100644 (file)
index 0000000..8afb090
--- /dev/null
@@ -0,0 +1,42 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import abc
+
+from oslo_log import log
+import six
+
+from conductor.data.plugins import base
+
+LOG = log.getLogger(__name__)
+
+
+@six.add_metaclass(abc.ABCMeta)
+class InventoryProviderBase(base.DataPlugin):
+    """Base class for Inventory Provider plugins"""
+
+    @abc.abstractmethod
+    def name(self):
+        """Return human-readable name."""
+        pass
+
+    @abc.abstractmethod
+    def resolve_demands(self, demands):
+        """Resolve demands into inventory candidate lists"""
+        pass
diff --git a/conductor/conductor/data/plugins/inventory_provider/extensions.py b/conductor/conductor/data/plugins/inventory_provider/extensions.py
new file mode 100644 (file)
index 0000000..18f4c4b
--- /dev/null
@@ -0,0 +1,45 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from oslo_log import log
+import stevedore
+
+from conductor.conf import inventory_provider
+from conductor.i18n import _LI
+
+LOG = log.getLogger(__name__)
+
+inventory_provider.register_extension_manager_opts()
+
+
+class Manager(stevedore.named.NamedExtensionManager):
+    """Manage Inventory Provider extensions."""
+
+    def __init__(self, conf, namespace):
+        super(Manager, self).__init__(
+            namespace, conf.inventory_provider.extensions,
+            invoke_on_load=True, name_order=True)
+        LOG.info(_LI("Loaded inventory provider extensions: %s"), self.names())
+
+    def initialize(self):
+        """Initialize enabled inventory provider extensions."""
+        for extension in self.extensions:
+            LOG.info(_LI("Initializing inventory provider extension '%s'"),
+                     extension.name)
+            extension.obj.initialize()
diff --git a/conductor/conductor/data/plugins/service_controller/__init__.py b/conductor/conductor/data/plugins/service_controller/__init__.py
new file mode 100644 (file)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/data/plugins/service_controller/base.py b/conductor/conductor/data/plugins/service_controller/base.py
new file mode 100644 (file)
index 0000000..ad00c98
--- /dev/null
@@ -0,0 +1,42 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import abc
+
+from oslo_log import log
+import six
+
+from conductor.data.plugins import base
+
+LOG = log.getLogger(__name__)
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ServiceControllerBase(base.DataPlugin):
+    """Base class for Service Controller plugins"""
+
+    @abc.abstractmethod
+    def name(self):
+        """Return human-readable name."""
+        pass
+
+    @abc.abstractmethod
+    def filter_candidates(self, candidates):
+        """Reduce candidate list based on SDN-C intelligence"""
+        pass
diff --git a/conductor/conductor/data/plugins/service_controller/extensions.py b/conductor/conductor/data/plugins/service_controller/extensions.py
new file mode 100644 (file)
index 0000000..f309102
--- /dev/null
@@ -0,0 +1,45 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from oslo_log import log
+import stevedore
+
+from conductor.conf import service_controller
+from conductor.i18n import _LI
+
+LOG = log.getLogger(__name__)
+
+service_controller.register_extension_manager_opts()
+
+
+class Manager(stevedore.named.NamedExtensionManager):
+    """Manage Service Controller extensions."""
+
+    def __init__(self, conf, namespace):
+        super(Manager, self).__init__(
+            namespace, conf.service_controller.extensions,
+            invoke_on_load=True, name_order=True)
+        LOG.info(_LI("Loaded service controller extensions: %s"), self.names())
+
+    def initialize(self):
+        """Initialize enabled service controller extensions."""
+        for extension in self.extensions:
+            LOG.info(_LI("Initializing service controller extension '%s'"),
+                     extension.name)
+            extension.obj.initialize()
diff --git a/conductor/conductor/data/plugins/service_controller/sdnc.py b/conductor/conductor/data/plugins/service_controller/sdnc.py
new file mode 100644 (file)
index 0000000..23968f0
--- /dev/null
@@ -0,0 +1,126 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import time
+
+from oslo_config import cfg
+from oslo_log import log
+
+from conductor.common import rest
+from conductor.data.plugins.service_controller import base
+from conductor.i18n import _LE
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
+# SDN-C plugin configuration options, registered under the [sdnc] group.
+SDNC_OPTS = [
+    cfg.IntOpt('cache_refresh_interval',
+               default=1440,
+               help='Interval with which to refresh the local cache, '
+                    'in minutes.'),
+    cfg.StrOpt('table_prefix',
+               default='sdnc',
+               help='Data Store table prefix.'),
+    cfg.StrOpt('server_url',
+               default='https://controller:8443/restconf/',
+               help='Base URL for SDN-C, up to and including the version.'),
+    cfg.StrOpt('username',
+               help='Basic Authentication Username'),
+    cfg.StrOpt('password',
+               help='Basic Authentication Password'),
+    # BUG FIX: these are numeric options; declaring them as StrOpt with
+    # integer defaults hands strings to consumers and breaks
+    # timeout/retry arithmetic. Use IntOpt.
+    cfg.IntOpt('sdnc_rest_timeout',
+               default=60,
+               help='Timeout for SDNC Rest Call'),
+    cfg.IntOpt('sdnc_retries',
+               default=3,
+               help='Retry Numbers for SDNC Rest Call'),
+]
+
+CONF.register_opts(SDNC_OPTS, group='sdnc')
+
+
+class SDNC(base.ServiceControllerBase):
+    """SDN Service Controller"""
+
+    def __init__(self):
+        """Initializer"""
+
+        # FIXME(jdandrea): Pass this in to init.
+        self.conf = CONF
+
+        # Strip any trailing slash so path joins are predictable downstream.
+        self.base = self.conf.sdnc.server_url.rstrip('/')
+        self.password = self.conf.sdnc.password
+        self.timeout = self.conf.sdnc.sdnc_rest_timeout
+        # NOTE(review): TLS verification is hard-coded off — confirm this is
+        # intentional for the target deployment.
+        self.verify = False
+        self.retries = self.conf.sdnc.sdnc_retries
+        self.username = self.conf.sdnc.username
+
+        # NOTE(review): self.timeout and self.verify are captured above but
+        # are not forwarded to the REST helper below — confirm whether
+        # rest.REST should receive them.
+        kwargs = {
+            "server_url": self.base,
+            "retries": self.retries,
+            "username": self.username,
+            "password": self.password,
+            "log_debug": self.conf.debug,
+        }
+        self.rest = rest.REST(**kwargs)
+
+        # Not sure what info from SDNC is cacheable
+        self._sdnc_cache = {}
+
+    def initialize(self):
+        """Perform any late initialization."""
+        pass
+
+    def name(self):
+        """Return human-readable name."""
+        return "SDN-C"
+
+    def _request(self, method='get', path='/', data=None,
+                 context=None, value=None):
+        """Performs HTTP request."""
+        kwargs = {
+            "method": method,
+            "path": path,
+            "data": data,
+        }
+
+        # TODO(jdandrea): Move timing/response logging into the rest helper?
+        start_time = time.time()
+        response = self.rest.request(**kwargs)
+        elapsed = time.time() - start_time
+        LOG.debug("Total time for SDN-C request "
+                  "({0:}: {1:}): {2:.3f} sec".format(context, value, elapsed))
+
+        # A None response means the REST helper could not reach SDN-C at all;
+        # any non-200 status is logged but still returned to the caller.
+        if response is None:
+            LOG.error(_LE("No response from SDN-C ({}: {})").
+                      format(context, value))
+        elif response.status_code != 200:
+            LOG.error(_LE("SDN-C request ({}: {}) returned HTTP "
+                          "status {} {}, link: {}{}").
+                      format(context, value,
+                             response.status_code, response.reason,
+                             self.base, path))
+        return response
+
+    def filter_candidates(self, request, candidate_list,
+                          constraint_name, constraint_type):
+        """Reduce candidate list based on SDN-C intelligence"""
+        # Placeholder: currently returns the candidate list unchanged.
+        selected_candidates = candidate_list
+        return selected_candidates
diff --git a/conductor/conductor/data/service.py b/conductor/conductor/data/service.py
new file mode 100644 (file)
index 0000000..33d467f
--- /dev/null
@@ -0,0 +1,460 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+# import json
+# import os
+
+import cotyledon
+from oslo_config import cfg
+from oslo_log import log
+# from stevedore import driver
+
+# from conductor import __file__ as conductor_root
+from conductor.common.music import messaging as music_messaging
+from conductor.data.plugins.inventory_provider import extensions as ip_ext
+from conductor.data.plugins.service_controller import extensions as sc_ext
+from conductor.i18n import _LE, _LI, _LW
+from conductor import messaging
+# from conductor.solver.resource import region
+# from conductor.solver.resource import service
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
+# Configuration options for the data service, registered under [data].
+DATA_OPTS = [
+    cfg.IntOpt('workers',
+               default=1,
+               min=1,
+               help='Number of workers for data service. '
+                    'Default value is 1.'),
+    cfg.BoolOpt('concurrent',
+                default=False,
+                help='Set to True when data will run in active-active '
+                     'mode. When set to False, data will flush any abandoned '
+                     'messages at startup.'),
+]
+
+CONF.register_opts(DATA_OPTS, group='data')
+
+
+class DataServiceLauncher(object):
+    """Listener for the data service."""
+
+    def __init__(self, conf):
+        """Initializer."""
+        self.conf = conf
+        self.init_extension_managers(conf)
+
+    def init_extension_managers(self, conf):
+        """Initialize extension managers."""
+        self.ip_ext_manager = ip_ext.Manager(
+            conf, 'conductor.inventory_provider.plugin')
+        self.ip_ext_manager.initialize()
+        self.sc_ext_manager = sc_ext.Manager(
+            conf, 'conductor.service_controller.plugin')
+        self.sc_ext_manager.initialize()
+
+    def run(self):
+        """Start RPC workers listening on the "data" topic."""
+        transport = messaging.get_transport(self.conf)
+        if not transport:
+            return
+        endpoint = DataEndpoint(self.ip_ext_manager, self.sc_ext_manager)
+        kwargs = {
+            'transport': transport,
+            'target': music_messaging.Target(topic="data"),
+            'endpoints': [endpoint],
+            # Flush abandoned messages unless running active-active.
+            'flush': not self.conf.data.concurrent,
+        }
+        svcmgr = cotyledon.ServiceManager()
+        svcmgr.add(music_messaging.RPCService,
+                   workers=self.conf.data.workers,
+                   args=(self.conf,), kwargs=kwargs)
+        svcmgr.run()
+
+
+class DataEndpoint(object):
+    """RPC endpoints exposed by the data service.
+
+    Each public method receives an RPC context ``ctx`` and an ``arg`` dict,
+    and returns ``{'response': ..., 'error': bool}``.
+    """
+
+    def __init__(self, ip_ext_manager, sc_ext_manager):
+        """Initializer.
+
+        :param ip_ext_manager: inventory provider extension manager
+        :param sc_ext_manager: service controller extension manager
+        """
+        self.ip_ext_manager = ip_ext_manager
+        self.sc_ext_manager = sc_ext_manager
+        self.plugin_cache = {}
+
+    def get_candidate_location(self, ctx, arg):
+        """Return the (lat, lon) tuple carried by a candidate, if any."""
+        # candidates should have lat long info already
+        error = False
+        location = None
+        candidate = arg["candidate"]
+        lat = candidate.get('latitude', None)
+        lon = candidate.get('longitude', None)
+        if lat and lon:
+            location = (float(lat), float(lon))
+        else:
+            error = True
+        return {'response': location, 'error': error}
+
+    def get_candidate_zone(self, ctx, arg):
+        """Resolve a candidate's zone for the 'region'/'complex' category."""
+        candidate = arg["candidate"]
+        category = arg["category"]
+        zone = None
+        error = False
+
+        if category == 'region':
+            zone = candidate['location_id']
+        elif category == 'complex':
+            zone = candidate['complex_name']
+        else:
+            error = True
+
+        if error:
+            LOG.error(_LE("Unresolvable zone category {}").format(category))
+        else:
+            LOG.info(_LI("Candidate zone is {}").format(zone))
+        return {'response': zone, 'error': error}
+
+    def get_candidates_from_service(self, ctx, arg):
+        """Filter a candidate list through the named service controller."""
+        candidate_list = arg["candidate_list"]
+        constraint_name = arg["constraint_name"]
+        constraint_type = arg["constraint_type"]
+        controller = arg["controller"]
+        request = arg["request"]
+        error = False
+        filtered_candidates = []
+        # call service and fetch candidates
+        # TODO(jdandrea): Get rid of the SDN-C reference (outside of plugin!)
+        if controller == "SDN-C":
+            results = self.sc_ext_manager.map_method(
+                'filter_candidates',
+                request=request,
+                candidate_list=candidate_list,
+                constraint_name=constraint_name,
+                constraint_type=constraint_type
+            )
+            if results and len(results) > 0:
+                filtered_candidates = results[0]
+            else:
+                # BUG FIX: LOG.warn is deprecated; use LOG.warning.
+                LOG.warning(
+                    _LW("No candidates returned by service "
+                        "controller: {}; may be a new service "
+                        "instantiation.").format(controller))
+        else:
+            LOG.error(_LE("Unknown service controller: {}").format(controller))
+        # if response from service controller is empty (plugin returned None)
+        if filtered_candidates is None:
+            LOG.error("No capacity found from SDN-GC for candidates: "
+                      "{}".format(candidate_list))
+            return {'response': [], 'error': error}
+        else:
+            LOG.debug("Filtered candidates: {}".format(filtered_candidates))
+            candidate_list = [c for c in candidate_list
+                              if c in filtered_candidates]
+            return {'response': candidate_list, 'error': error}
+
+    def get_candidate_discard_set(self, value, candidate_list, value_attrib):
+        """Return candidate_ids failing an all/any match on value_attrib.
+
+        ``value`` is a dict with an "all" or "any" key listing acceptable
+        values for the candidate attribute named by ``value_attrib``.
+        """
+        discard_set = set()
+        value_dict = value
+        value_condition = ''
+        # BUG FIX: value_list must be initialized; a dict containing neither
+        # "all" nor "any" previously raised NameError at the check below.
+        value_list = None
+        if value_dict:
+            if "all" in value_dict:
+                value_list = value_dict.get("all")
+                value_condition = "all"
+            elif "any" in value_dict:
+                value_list = value_dict.get("any")
+                value_condition = "any"
+
+            if not value_list:
+                return discard_set
+
+            for candidate in candidate_list:
+                c_any = False
+                c_all = True
+                for value in value_list:
+                    if candidate.get(value_attrib) == value:
+                        c_any = True  # include if any one is met
+                    elif candidate.get(value_attrib) != value:
+                        c_all = False  # discard even if one is not met
+                if value_condition == 'any' and not c_any:
+                    discard_set.add(candidate.get("candidate_id"))
+                elif value_condition == 'all' and not c_all:
+                    discard_set.add(candidate.get("candidate_id"))
+        return discard_set
+
+    def get_inventory_group_candidates(self, ctx, arg):
+        """Find the pair partners of resolved_candidate in inventory groups."""
+        candidate_list = arg["candidate_list"]
+        resolved_candidate = arg["resolved_candidate"]
+        candidate_names = []
+        error = False
+        service_description = 'DHV_VVIG_PAIR'
+        results = self.ip_ext_manager.map_method(
+            'get_inventory_group_pairs',
+            service_description=service_description
+        )
+        if not results or len(results) < 1:
+            LOG.error(
+                _LE("Empty inventory group response for service: {}").format(
+                    service_description))
+            error = True
+        else:
+            pairs = results[0]
+            if not pairs or len(pairs) < 1:
+                LOG.error(
+                    _LE("No inventory group candidates found for service: {}, "
+                        "inventory provider: {}").format(
+                        service_description, self.ip_ext_manager.names()[0]))
+                error = True
+            else:
+                LOG.debug(
+                    "Inventory group pairs: {}, service: {}, "
+                    "inventory provider: {}".format(
+                        pairs, service_description,
+                        self.ip_ext_manager.names()[0]))
+                # A candidate may appear on either side of a pair; collect
+                # the opposite member as its partner.
+                for pair in pairs:
+                    if resolved_candidate.get("candidate_id") == pair[0]:
+                        candidate_names.append(pair[1])
+                    elif resolved_candidate.get("candidate_id") == pair[1]:
+                        candidate_names.append(pair[0])
+
+        candidate_list = [c for c in candidate_list
+                          if c["candidate_id"] in candidate_names]
+        LOG.info(
+            _LI("Inventory group candidates: {}, service: {}, "
+                "inventory provider: {}").format(
+                candidate_list, service_description,
+                self.ip_ext_manager.names()[0]))
+        return {'response': candidate_list, 'error': error}
+
+    def get_candidates_by_attributes(self, ctx, arg):
+        """Discard candidates failing the attribute checks in properties."""
+        candidate_list = arg["candidate_list"]
+        properties = arg["properties"]
+        discard_set = set()
+
+        attributes_to_evaluate = properties.get('evaluate')
+        for attrib, value in attributes_to_evaluate.items():
+            if value == '':
+                continue
+            if attrib == 'network_roles':
+                role_candidates = dict()
+                role_list = []
+                nrc_dict = value
+                role_condition = ''
+                if nrc_dict:
+                    if "all" in nrc_dict:
+                        role_list = nrc_dict.get("all")
+                        role_condition = "all"
+                    elif "any" in nrc_dict:
+                        role_list = nrc_dict.get("any")
+                        role_condition = "any"
+
+                    # if the role_list is empty do nothing
+                    if not role_list or role_list == '':
+                        LOG.error(
+                            _LE("No roles available, "
+                                "inventory provider: {}").format(
+                                self.ip_ext_manager.names()[0]))
+                        continue
+                    for role in role_list:
+                        # query inventory provider to check if
+                        # the candidate is in role
+                        results = self.ip_ext_manager.map_method(
+                            'check_network_roles',
+                            network_role_id=role
+                        )
+                        if not results or len(results) < 1:
+                            LOG.error(
+                                _LE("Empty response from inventory "
+                                    "provider {} for network role {}").format(
+                                    self.ip_ext_manager.names()[0], role))
+                            continue
+                        region_ids = results[0]
+                        if not region_ids:
+                            LOG.error(
+                                _LE("No candidates from inventory provider {} "
+                                    "for network role {}").format(
+                                    self.ip_ext_manager.names()[0], role))
+                            continue
+                        LOG.debug(
+                            "Network role candidates: {}, role: {},"
+                            "inventory provider: {}".format(
+                                region_ids, role,
+                                self.ip_ext_manager.names()[0]))
+                        role_candidates[role] = region_ids
+
+                    # find candidates that meet conditions
+                    for candidate in candidate_list:
+                        # perform this check only for cloud candidates
+                        if candidate["inventory_type"] != "cloud":
+                            continue
+                        c_any = False
+                        c_all = True
+                        for role in role_list:
+                            if role not in role_candidates:
+                                c_all = False
+                                continue
+                            rc = role_candidates.get(role)
+                            if rc and candidate.get("candidate_id") not in rc:
+                                c_all = False
+                                # discard even if one role is not met
+                            elif rc and candidate.get("candidate_id") in rc:
+                                c_any = True
+                                # include if any one role is met
+                        if role_condition == 'any' and not c_any:
+                            discard_set.add(candidate.get("candidate_id"))
+                        elif role_condition == 'all' and not c_all:
+                            discard_set.add(candidate.get("candidate_id"))
+
+            elif attrib == 'complex':
+                v_discard_set = \
+                    self.get_candidate_discard_set(
+                        value=value,
+                        candidate_list=candidate_list,
+                        value_attrib="complex_name")
+                discard_set.update(v_discard_set)
+            elif attrib == "country":
+                v_discard_set = \
+                    self.get_candidate_discard_set(
+                        value=value,
+                        candidate_list=candidate_list,
+                        value_attrib="country")
+                discard_set.update(v_discard_set)
+            elif attrib == "state":
+                v_discard_set = \
+                    self.get_candidate_discard_set(
+                        value=value,
+                        candidate_list=candidate_list,
+                        value_attrib="state")
+                discard_set.update(v_discard_set)
+            elif attrib == "region":
+                v_discard_set = \
+                    self.get_candidate_discard_set(
+                        value=value,
+                        candidate_list=candidate_list,
+                        value_attrib="region")
+                discard_set.update(v_discard_set)
+
+        # return candidates not in discard set
+        candidate_list[:] = [c for c in candidate_list
+                             if c['candidate_id'] not in discard_set]
+        LOG.info(
+            "Available candidates after attribute checks: {}, "
+            "inventory provider: {}".format(
+                candidate_list, self.ip_ext_manager.names()[0]))
+        return {'response': candidate_list, 'error': False}
+
+    def resolve_demands(self, ctx, arg):
+        """Resolve demands into candidates via the inventory providers."""
+        error = False
+        demands = arg.get('demands')
+        resolved_demands = None
+        results = self.ip_ext_manager.map_method(
+            'resolve_demands',
+            demands
+        )
+        if results and len(results) > 0:
+            resolved_demands = results[0]
+        else:
+            error = True
+
+        return {'response': {'resolved_demands': resolved_demands},
+                'error': error}
+
+    def resolve_location(self, ctx, arg):
+        """Resolve a host_name or clli_code to a location via providers."""
+        error = False
+        resolved_location = None
+        # BUG FIX: results must be initialized; when neither host_name nor
+        # clli_code is supplied, the check below previously raised NameError.
+        results = None
+
+        host_name = arg.get('host_name')
+        clli_code = arg.get('clli_code')
+
+        if host_name:
+            results = self.ip_ext_manager.map_method(
+                'resolve_host_location',
+                host_name
+            )
+        elif clli_code:
+            results = self.ip_ext_manager.map_method(
+                'resolve_clli_location',
+                clli_code
+            )
+        else:
+            # unknown location response
+            LOG.error(_LE("Unknown location type from the input template."
+                          "Expected location types are host_name"
+                          " or clli_code."))
+
+        if results and len(results) > 0:
+            resolved_location = results[0]
+        else:
+            error = True
+        return {'response': {'resolved_location': resolved_location},
+                'error': error}
+
+    def call_reservation_operation(self, ctx, arg):
+        """Invoke a reserve/release operation on the service controller."""
+        result = True
+        reserved_candidates = None
+        method = arg["method"]
+        candidate_list = arg["candidate_list"]
+        reservation_name = arg["reservation_name"]
+        reservation_type = arg["reservation_type"]
+        controller = arg["controller"]
+        request = arg["request"]
+
+        if controller == "SDN-C":
+            results = self.sc_ext_manager.map_method(
+                'call_reservation_operation',
+                method=method,
+                candidate_list=candidate_list,
+                reservation_name=reservation_name,
+                reservation_type=reservation_type,
+                request=request
+            )
+            if results and len(results) > 0:
+                reserved_candidates = results[0]
+        else:
+            LOG.error(_LE("Unknown service controller: {}").format(controller))
+        # BUG FIX: "is None or not X" collapses to "not X"; also log the
+        # warning-marked (_LW) message at warning level instead of debug.
+        if not reserved_candidates:
+            result = False
+            LOG.warning(
+                _LW("Unable to {} for "
+                    "candidate {}.").format(method, reserved_candidates))
+        else:
+            LOG.debug("{} for the candidate: "
+                      "{}".format(method, reserved_candidates))
+        return {'response': result,
+                'error': not result}
diff --git a/conductor/conductor/reservation/__init__.py b/conductor/conductor/reservation/__init__.py
new file mode 100644 (file)
index 0000000..e615a9c
--- /dev/null
@@ -0,0 +1,20 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from .service import ReservationServiceLauncher  # noqa: F401
diff --git a/conductor/conductor/reservation/service.py b/conductor/conductor/reservation/service.py
new file mode 100644 (file)
index 0000000..c2b0ba8
--- /dev/null
@@ -0,0 +1,370 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import cotyledon
+from oslo_config import cfg
+from oslo_log import log
+
+from conductor.common.models import plan
+from conductor.common.music import api
+from conductor.common.music import messaging as music_messaging
+from conductor.common.music.model import base
+from conductor.i18n import _LE, _LI
+from conductor import messaging
+from conductor import service
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
+# Configuration options for the reservation service ([reservation] group).
+reservation_OPTS = [
+    cfg.IntOpt('workers',
+               default=1,
+               min=1,
+               help='Number of workers for reservation service. '
+                    'Default value is 1.'),
+    cfg.IntOpt('reserve_retries',
+               default=3,
+               help='Number of times reservation/release '
+                    'should be attempted.'),
+    cfg.IntOpt('reserve_counter',
+               default=3,
+               # BUG FIX: the fragments previously concatenated to
+               # "shouldbe"; the trailing space restores the wording.
+               help='Number of times a plan should '
+                    'be attempted to reserve.'),
+    cfg.BoolOpt('concurrent',
+                default=False,
+                help='Set to True when reservation will run in active-active '
+                     'mode. When set to False, reservation will restart any '
+                     'orphaned reserving requests at startup.'),
+]
+
+CONF.register_opts(reservation_OPTS, group='reservation')
+
+# Pull in service opts. We use them here.
+OPTS = service.OPTS
+CONF.register_opts(OPTS)
+
+
+class ReservationServiceLauncher(object):
+    """Launcher for the reservation service."""
+
+    def __init__(self, conf):
+        """Initializer.
+
+        Sets up Music access, ensures the keyspace exists, and builds the
+        dynamic Plan model bound to that keyspace.
+        """
+        self.conf = conf
+
+        # Set up Music access.
+        self.music = api.API()
+        self.music.keyspace_create(keyspace=conf.keyspace)
+
+        # Dynamically create a plan class for the specified keyspace
+        self.Plan = base.create_dynamic_model(
+            keyspace=conf.keyspace, baseclass=plan.Plan, classname="Plan")
+
+        if not self.Plan:
+            # BUG FIX: a bare ``raise`` with no active exception surfaced as
+            # an opaque RuntimeError; raise one with a clear message instead
+            # (same effective exception type for any caller that catches it).
+            raise RuntimeError("Unable to create dynamic Plan class for "
+                               "keyspace %s" % conf.keyspace)
+
+    def run(self):
+        """Launch reservation workers via cotyledon."""
+        kwargs = {'plan_class': self.Plan}
+        svcmgr = cotyledon.ServiceManager()
+        svcmgr.add(ReservationService,
+                   workers=self.conf.reservation.workers,
+                   args=(self.conf,), kwargs=kwargs)
+        svcmgr.run()
+
+
+class ReservationService(cotyledon.Service):
+    """reservation service."""
+
+    # This will appear in 'ps xaf'
+    name = "Conductor Reservation"
+
+    def __init__(self, worker_id, conf, **kwargs):
+        """Initializer"""
+        LOG.debug("%s" % self.__class__.__name__)
+        super(ReservationService, self).__init__(worker_id)
+        self._init(conf, **kwargs)
+        # Control-loop flag checked by run(); clear it to stop the worker.
+        self.running = True
+
+    def _init(self, conf, **kwargs):
+        """Set up the necessary ingredients."""
+        self.conf = conf
+        self.kwargs = kwargs
+
+        # Dynamically-built Plan model supplied by the launcher.
+        self.Plan = kwargs.get('plan_class')
+
+        # Set up the RPC service(s) we want to talk to.
+        self.data_service = self.setup_rpc(conf, "data")
+
+        # Set up Music access.
+        self.music = api.API()
+
+        # Number of retries for reservation/release
+        self.reservation_retries = self.conf.reservation.reserve_retries
+        self.reservation_counter = self.conf.reservation.reserve_counter
+
+        # Single-worker mode: reclaim plans stuck in RESERVING at startup.
+        if not self.conf.reservation.concurrent:
+            self._reset_reserving_status()
+
+    def _gracefully_stop(self):
+        """Gracefully stop working on things"""
+        # No in-flight state to flush yet; hook kept for service teardown.
+        pass
+
+    def _reset_reserving_status(self):
+        """Reset plans being reserved so they can be reserved again.
+
+        Use this only when the reservation service is not running concurrently.
+        """
+        for stuck_plan in self.Plan.query.all():
+            if stuck_plan.status != self.Plan.RESERVING:
+                continue
+            stuck_plan.status = self.Plan.SOLVED
+            stuck_plan.update()
+
+    def _restart(self):
+        """Prepare to restart the service"""
+        # Nothing needs to be torn down or rebuilt between restarts yet.
+        pass
+
+    def setup_rpc(self, conf, topic):
+        """Set up the RPC Client"""
+        # TODO(jdandrea): Put this pattern inside music_messaging?
+        transport = messaging.get_transport(conf=conf)
+        target = music_messaging.Target(topic=topic)
+        return music_messaging.RPCClient(
+            conf=conf, transport=transport, target=target)
+
+    def try_reservation_call(self, method, candidate_list,
+                             reservation_name, reservation_type,
+                             controller, request):
+        """Invoke a reservation operation via the data service, retrying
+        up to self.reservation_retries times; True on first success."""
+        ctxt = {}
+        args = {'method': method,
+                'candidate_list': candidate_list,
+                'reservation_name': reservation_name,
+                'reservation_type': reservation_type,
+                'controller': controller,
+                'request': request
+                }
+
+        method_name = "call_reservation_operation"
+        for attempt in range(1, self.reservation_retries + 1):
+            is_success = self.data_service.call(ctxt=ctxt,
+                                                method=method_name,
+                                                args=args)
+            LOG.debug("Attempt #{} calling method {} for candidate "
+                      "{} - response: {}".format(attempt,
+                                                 method,
+                                                 candidate_list,
+                                                 is_success))
+            if is_success:
+                return True
+        return False
+
+    def rollback_reservation(self, reservation_list):
+        """Function to rollback(release) reservations"""
+        # TODO(snarayanan): Need to test this once the API is ready
+        for reservation in reservation_list:
+            released = self.try_reservation_call(
+                method="release",
+                candidate_list=reservation['candidate_list'],
+                reservation_name=reservation['reservation_name'],
+                reservation_type=reservation['reservation_type'],
+                controller=reservation['controller'],
+                request=reservation['request']
+            )
+            if not released:
+                # rollback failed report error to SDNC
+                LOG.error(_LE("Unable to release reservation "
+                              "{}").format(reservation))
+                return False
+                # move to the next reserved candidate
+        return True
+
+    def run(self):
+        """Run"""
+        LOG.debug("%s" % self.__class__.__name__)
+        # TODO(snarayanan): This is really meant to be a control loop
+        # As long as self.running is true, we process another request.
+        while self.running:
+            # plans = Plan.query().all()
+            # Find the first plan with a status of SOLVED.
+            # Change its status to RESERVING.
+
+            solution = None
+            translation = None
+            # requests_to_reserve = dict()
+            plans = self.Plan.query.all()
+            found_solved_template = False
+
+            for p in plans:
+                if p.status == self.Plan.SOLVED:
+                    solution = p.solution
+                    translation = p.translation
+                    found_solved_template = True
+                    break
+            if found_solved_template and not solution:
+                message = _LE("Plan {} status is solved, yet "
+                              "the solution wasn't found").format(p.id)
+                LOG.error(message)
+                p.status = self.Plan.ERROR
+                p.message = message
+                p.update()
+                continue  # continue looping
+            elif not solution:
+                continue  # continue looping
+
+            # update status to reserving
+            p.status = self.Plan.RESERVING
+            p.update()
+
+            # begin reservations
+            # if plan needs reservation proceed with reservation
+            # else set status to done.
+            reservations = None
+            if translation:
+                conductor_solver = translation.get("conductor_solver")
+                if conductor_solver:
+                    reservations = conductor_solver.get("reservations")
+                else:
+                    LOG.error("no conductor_solver in "
+                              "translation for Plan {}".format(p.id))
+
+            if reservations:
+                counter = reservations.get("counter") + 1
+                reservations['counter'] = counter
+                if counter <= self.reservation_counter:
+                    recommendations = solution.get("recommendations")
+                    reservation_list = list()
+
+                    for reservation, resource in reservations.get("demands",
+                                                                  {}).items():
+                        candidates = list()
+                        reservation_demand = resource.get("demand")
+                        reservation_name = resource.get("name")
+                        reservation_type = resource.get("type")
+
+                        reservation_properties = resource.get("properties")
+                        if reservation_properties:
+                            controller = reservation_properties.get(
+                                "controller")
+                            request = reservation_properties.get("request")
+
+                        for recommendation in recommendations:
+                            for demand, r_resource in recommendation.items():
+                                if demand == reservation_demand:
+                                    # get selected candidate from translation
+                                    selected_candidate_id = \
+                                        r_resource.get("candidate")\
+                                        .get("candidate_id")
+                                    demands = \
+                                        translation.get("conductor_solver")\
+                                        .get("demands")
+                                    for demand_name, d_resource in \
+                                            demands.items():
+                                        if demand_name == demand:
+                                            for candidate in d_resource\
+                                                    .get("candidates"):
+                                                if candidate\
+                                                    .get("candidate_id") ==\
+                                                        selected_candidate_id:
+                                                    candidates\
+                                                        .append(candidate)
+
+                        is_success = self.try_reservation_call(
+                            method="reserve",
+                            candidate_list=candidates,
+                            reservation_name=reservation_name,
+                            reservation_type=reservation_type,
+                            controller=controller,
+                            request=request)
+
+                        # if reservation succeeds continue with next candidate
+                        if is_success:
+                            curr_reservation = dict()
+                            curr_reservation['candidate_list'] = candidates
+                            curr_reservation['reservation_name'] = \
+                                reservation_name
+                            curr_reservation['reservation_type'] = \
+                                reservation_type
+                            curr_reservation['controller'] = controller
+                            curr_reservation['request'] = request
+                            reservation_list.append(curr_reservation)
+                        else:
+                            # begin roll back of all reserved resources on
+                            # the first failed reservation
+                            rollback_status = \
+                                self.rollback_reservation(reservation_list)
+                            # statuses
+                            if rollback_status:
+                                # released all reservations,
+                                # move plan to translated
+                                p.status = self.Plan.TRANSLATED
+                                p.update()
+                                del reservation_list[:]
+                            else:
+                                LOG.error("Reservation rollback failed")
+                                p.status = self.Plan.ERROR
+                                p.message = "Reservation release failed"
+                                p.update()
+                            break  # reservation failed
+
+                        continue
+                        # continue with reserving the next candidate
+                else:
+                    LOG.error("Tried {} times. Plan {} is unable to make"
+                              "reservation "
+                              .format(self.reservation_counter, p.id))
+                    p.status = self.Plan.ERROR
+                    p.message = "Reservation failed"
+                    p.update()
+                    continue
+
+            # verify if all candidates have been reserved
+            if p.status == self.Plan.RESERVING:
+                # all reservations succeeded.
+                LOG.info(_LI("Plan {} Reservation complete").
+                         format(p.id))
+                LOG.debug("Plan {} Reservation complete".format(p.id))
+                p.status = self.Plan.DONE
+                p.update()
+
+            continue
+            # done reserving continue to loop
+
+    def terminate(self):
+        """Terminate"""
+        LOG.debug("%s" % self.__class__.__name__)
+        self.running = False
+        self._gracefully_stop()
+        super(ReservationService, self).terminate()
+
+    def reload(self):
+        """Reload"""
+        LOG.debug("%s" % self.__class__.__name__)
+        self._restart()
diff --git a/conductor/conductor/solver/__init__.py b/conductor/conductor/solver/__init__.py
new file mode 100644 (file)
index 0000000..ff501ef
--- /dev/null
@@ -0,0 +1,20 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from .service import SolverServiceLauncher  # noqa: F401
diff --git a/conductor/conductor/solver/optimizer/__init__.py b/conductor/conductor/solver/optimizer/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/optimizer/best_first.py b/conductor/conductor/solver/optimizer/best_first.py
new file mode 100755 (executable)
index 0000000..65e435d
--- /dev/null
@@ -0,0 +1,163 @@
+#!/bin/python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+import copy
+import operator
+from oslo_log import log
+import sys
+
+from conductor.solver.optimizer import decision_path as dpath
+from conductor.solver.optimizer import search
+
+LOG = log.getLogger(__name__)
+
+
class BestFirst(search.Search):
    """Best-first search over the demand decision space.

    First computes a greedy fit-first solution, then explores decision
    paths ordered by objective value, pruning paths that cannot beat
    the greedy solution (pruning is applied only for "min" objectives).
    """

    def __init__(self, conf):
        # All state setup happens in the Search base class.
        search.Search.__init__(self, conf)

    def search(self, _demand_list, _objective):
        """Return the best decision path found, or None if no solution.

        :param _demand_list: demands to place (objects with a .name)
        :param _objective: objective with .goal and .compute(path)
        """
        # Greedy baseline; also serves as the pruning bound below.
        dlist = copy.deepcopy(_demand_list)
        heuristic_solution = self._search_by_fit_first(dlist, _objective)
        if heuristic_solution is None:
            LOG.debug("no solution")
            return None

        # open_list: frontier of partial paths, kept sorted by total_value.
        # close_paths: decision_id -> already-expanded path (visited set).
        open_list = []
        close_paths = {}

        ''' for the decision length heuristic '''
        # current_decision_length = 0

        # create root path
        decision_path = dpath.DecisionPath()
        decision_path.set_decisions({})

        # insert the root path into open_list
        open_list.append(decision_path)

        while len(open_list) > 0:
            # Expand the lowest-valued path first (list is kept sorted).
            p = open_list.pop(0)

            ''' for the decision length heuristic '''
            # dl = len(p.decisions)
            # if dl >= current_decision_length:
            #     current_decision_length = dl
            # else:
            #     continue

            # if explored all demands in p, complete the search with p
            unexplored_demand = self._get_new_demand(p, _demand_list)
            if unexplored_demand is None:
                return p

            p.current_demand = unexplored_demand

            msg = "demand = {}, decisions = {}, value = {}"
            LOG.debug(msg.format(p.current_demand.name,
                                 p.decision_id, p.total_value))

            # constraint solving
            candidate_list = self._solve_constraints(p)
            if len(candidate_list) > 0:
                for candidate in candidate_list:
                    # create path for each candidate for given demand
                    np = dpath.DecisionPath()
                    np.set_decisions(p.decisions)
                    np.decisions[p.current_demand.name] = candidate
                    _objective.compute(np)

                    valid_candidate = True

                    # check closeness for this decision
                    np.set_decision_id(p, candidate.name)
                    if np.decision_id in close_paths.keys():
                        valid_candidate = False

                    ''' for base comparison heuristic '''
                    # TODO(gjung): how to know this is about min
                    # Prune paths already no better than the greedy bound.
                    if _objective.goal == "min":
                        if np.total_value >= heuristic_solution.total_value:
                            valid_candidate = False

                    if valid_candidate is True:
                        open_list.append(np)

                # sort open_list by value
                open_list.sort(key=operator.attrgetter("total_value"))
            else:
                LOG.debug("no candidates")

            # insert p into close_paths
            close_paths[p.decision_id] = p

        # Frontier exhausted: fall back to the greedy solution.
        return heuristic_solution

    def _get_new_demand(self, _p, _demand_list):
        # Return the first demand not yet decided in path _p, else None.
        for demand in _demand_list:
            if demand.name not in _p.decisions.keys():
                return demand

        return None

    def _search_by_fit_first(self, _demand_list, _objective):
        """Greedy search: pick the best candidate per demand in order."""
        decision_path = dpath.DecisionPath()
        decision_path.set_decisions({})

        return self._find_current_best(_demand_list, _objective, decision_path)

    def _find_current_best(self, _demand_list, _objective, _decision_path):
        """Recursive greedy descent with candidate-level backtracking.

        Pops the next demand, chooses the candidate with the best
        objective value, and recurses; on failure, removes that
        candidate and retries with the remaining ones.
        """
        if len(_demand_list) == 0:
            LOG.debug("search done")
            return _decision_path

        # NOTE(review): this pop mutates the caller's list; on a failed
        # recursion the popped demand is not pushed back, and bound_value
        # is not reset between retries, so backtracking may not consider
        # worse-valued alternatives — confirm this is the intended
        # heuristic behavior.
        demand = _demand_list.pop(0)
        LOG.debug("demand = {}".format(demand.name))
        _decision_path.current_demand = demand
        candidate_list = self._solve_constraints(_decision_path)

        # Only "min" objectives get a usable bound; for other goals no
        # candidate can ever beat bound_value = 0.0 below.
        bound_value = 0.0
        if _objective.goal == "min":
            bound_value = sys.float_info.max

        while True:
            best_resource = None
            for candidate in candidate_list:
                _decision_path.decisions[demand.name] = candidate
                _objective.compute(_decision_path)
                if _objective.goal == "min":
                    if _decision_path.total_value < bound_value:
                        bound_value = _decision_path.total_value
                        best_resource = candidate

            if best_resource is None:
                LOG.debug("no resource, rollback")
                return None
            else:
                _decision_path.decisions[demand.name] = best_resource
                _decision_path.total_value = bound_value
                decision_path = self._find_current_best(
                    _demand_list, _objective, _decision_path)
                if decision_path is None:
                    # Recursion failed with this choice; discard it and
                    # retry with the remaining candidates.
                    candidate_list.remove(best_resource)
                else:
                    return decision_path
diff --git a/conductor/conductor/solver/optimizer/constraints/__init__.py b/conductor/conductor/solver/optimizer/constraints/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/optimizer/constraints/access_distance.py b/conductor/conductor/solver/optimizer/constraints/access_distance.py
new file mode 100755 (executable)
index 0000000..7c400b8
--- /dev/null
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+import operator
+from oslo_log import log
+
+from conductor.solver.optimizer.constraints import constraint
+from conductor.solver.utils import utils
+
+LOG = log.getLogger(__name__)
+
+
class AccessDistance(constraint.Constraint):
    """Keep only candidates within an air-distance threshold of a location."""

    def __init__(self, _name, _type, _demand_list, _priority=0,
                 _comparison_operator=operator.le,
                 _threshold=None, _location=None):
        constraint.Constraint.__init__(
            self, _name, _type, _demand_list, _priority)

        # Location instance the air distance is measured from.
        self.location = _location
        # The distance threshold for the constraint.
        self.distance_threshold = _threshold
        # Comparison function from the python operator module, applied
        # as comparison_operator(distance, threshold).
        self.comparison_operator = _comparison_operator

    def solve(self, _decision_path, _candidate_list, _request):
        """Return the candidates satisfying the distance constraint."""
        if _candidate_list is None:
            LOG.debug("Empty candidate list, need to get " +
                      "the candidate list for the demand/service")
            return _candidate_list

        cei = _request.cei
        rejected = []
        for candidate in _candidate_list:
            # Air distance between the constraint's location and the
            # candidate's location, checked against the threshold.
            distance = utils.compute_air_distance(
                self.location.value,
                cei.get_candidate_location(candidate))
            if not self.comparison_operator(distance,
                                            self.distance_threshold):
                if candidate not in rejected:
                    rejected.append(candidate)

        return [c for c in _candidate_list if c not in rejected]
diff --git a/conductor/conductor/solver/optimizer/constraints/attribute.py b/conductor/conductor/solver/optimizer/constraints/attribute.py
new file mode 100644 (file)
index 0000000..18f9332
--- /dev/null
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+# python imports
+
+# Conductor imports
+from conductor.solver.optimizer.constraints import constraint
+
+# Third-party library imports
+from oslo_log import log
+
+LOG = log.getLogger(__name__)
+
+
class Attribute(constraint.Constraint):
    """Filter candidates by attribute properties via the data engine."""

    def __init__(self, _name, _type, _demand_list, _priority=0,
                 _properties=None):
        constraint.Constraint.__init__(
            self, _name, _type, _demand_list, _priority)
        # Attribute properties the candidates must match.
        self.properties = _properties

    def solve(self, _decision_path, _candidate_list, _request):
        """Narrow *_candidate_list* to candidates matching the properties.

        Delegates attribute matching to the conductor data engine and
        filters the list in place (the caller's list object is kept).
        """
        cei = _request.cei
        demand_name = _decision_path.current_demand.name
        matching = cei.get_candidates_by_attributes(demand_name,
                                                    _candidate_list,
                                                    self.properties)
        _candidate_list[:] = [
            c for c in _candidate_list if c in matching]
        return _candidate_list
diff --git a/conductor/conductor/solver/optimizer/constraints/cloud_distance.py b/conductor/conductor/solver/optimizer/constraints/cloud_distance.py
new file mode 100755 (executable)
index 0000000..1e862d4
--- /dev/null
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+import operator
+from oslo_log import log
+
+from conductor.solver.optimizer.constraints import constraint
+from conductor.solver.utils import utils
+
+LOG = log.getLogger(__name__)
+
+
class CloudDistance(constraint.Constraint):
    """Constrain the pairwise air distance between chosen candidates."""

    def __init__(self, _name, _type, _demand_list, _priority=0,
                 _comparison_operator=operator.le, _threshold=None):
        constraint.Constraint.__init__(
            self, _name, _type, _demand_list, _priority)
        self.distance_threshold = _threshold
        self.comparison_operator = _comparison_operator
        # A pairwise distance constraint needs at least two demands.
        if len(_demand_list) <= 1:
            LOG.debug("Insufficient number of demands.")
            raise ValueError

    def solve(self, _decision_path, _candidate_list, _request):
        """Return candidates within the threshold of all prior decisions."""
        # Candidates already chosen for demands in this constraint's
        # demand list; only one candidate expected per demand in the
        # decision path. Demands not yet decided are solved later and
        # cannot be checked here.
        prior_choices = []
        for demand in self.demand_list:
            if demand in _decision_path.decisions:
                prior_choices.append(_decision_path.decisions[demand])

        rejected = []
        for candidate in _candidate_list:
            # A candidate must satisfy the threshold against every
            # previously chosen candidate.
            satisfied = True
            for chosen in prior_choices:
                cei = _request.cei
                if not self.comparison_operator(
                        utils.compute_air_distance(
                            cei.get_candidate_location(candidate),
                            cei.get_candidate_location(chosen)),
                        self.distance_threshold):
                    satisfied = False

            if not satisfied:
                if candidate not in rejected:
                    rejected.append(candidate)

        return [c for c in _candidate_list if c not in rejected]
diff --git a/conductor/conductor/solver/optimizer/constraints/constraint.py b/conductor/conductor/solver/optimizer/constraints/constraint.py
new file mode 100755 (executable)
index 0000000..03e2c33
--- /dev/null
@@ -0,0 +1,50 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import abc
+
+from oslo_log import log
+import six
+
+LOG = log.getLogger(__name__)
+
+
@six.add_metaclass(abc.ABCMeta)
class Constraint(object):
    """Abstract base class for solver constraints."""

    def __init__(self, _name, _type, _demand_list, _priority=0):
        """Common initializer; subclasses must call this.

        :param _name: constraint name
        :param _type: constraint type identifier
        :param _demand_list: demands this constraint applies to
        :param _priority: order in which the constraint is checked
        """
        self.name = _name
        self.constraint_type = _type
        self.demand_list = _demand_list
        self.check_priority = _priority

    @abc.abstractmethod
    def solve(self, _decision_path, _candidate_list, _request):
        """Narrow the candidate list for the current demand.

        Each subclass implements the solving logic for its constraint
        type and returns the (possibly reduced) candidate list.
        """
        return _candidate_list
diff --git a/conductor/conductor/solver/optimizer/constraints/inventory_group.py b/conductor/conductor/solver/optimizer/constraints/inventory_group.py
new file mode 100755 (executable)
index 0000000..f0f8089
--- /dev/null
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+from oslo_log import log
+
+from constraint import Constraint
+
+LOG = log.getLogger(__name__)
+
+
class InventoryGroup(Constraint):
    """Constraint keeping candidate pairs that share an inventory group.

    Defined over exactly two demands: once one demand of the pair has
    been resolved, the other demand's candidates are narrowed to those
    belonging to the same inventory group as the resolved candidate.
    """

    def __init__(self, _name, _type, _demand_list, _priority=0):
        """Initialize; _demand_list must contain exactly two demands.

        Raises ValueError when the demand list is not a pair.
        """
        Constraint.__init__(self, _name, _type, _demand_list, _priority)
        if len(self.demand_list) != 2:
            # Fixed message: the condition also fires for *fewer* than
            # two demands, so "more than two" was misleading.
            LOG.debug("Inventory group constraint must have "
                      "exactly two demands")
            raise ValueError(
                "inventory group constraint requires exactly two demands")

    def solve(self, _decision_path, _candidate_list, _request):
        """Narrow candidates to the resolved partner's inventory group.

        If the partner demand has not been resolved yet, the candidate
        list is returned unchanged.
        """

        # check if other demand in the demand pair has been already solved
        # other demand in pair
        other_demand = [d for d in self.demand_list if
                        d != _decision_path.current_demand.name][0]
        if other_demand not in _decision_path.decisions:
            LOG.debug("Other demand not yet resolved, " +
                      "return the current candidates")
            return _candidate_list
        # expect only one candidate per demand in decision
        resolved_candidate = _decision_path.decisions[other_demand]
        cei = _request.cei
        inventory_group_candidates = cei.get_inventory_group_candidates(
            _candidate_list,
            _decision_path.current_demand.name,
            resolved_candidate)
        _candidate_list = [candidate for candidate in _candidate_list if
                           (candidate in inventory_group_candidates)]

        '''
        # Alternate implementation that *may* be more efficient
        # if the decision path has multiple candidates per solved demand
        # *and* inventory group is smaller than the candidate list

        select_list = list()
        # get candidates for current demand
        current_demand = _decision_path.current_demand
        current_candidates = _candidate_list

        # get inventory groups for current demand,
        # assuming that group information is tied with demand
        inventory_groups = cei.get_inventory_groups(current_demand)

        for group in inventory_groups:
            if group[0] in current_candidates and group[1] in other_candidates:
                # is the symmetric candidacy valid too ?
                if group[0] not in select_list:
                    select_list.append(group[0])
        _candidate_list[:] = [c for c in _candidate_list if c in select_list]
        '''

        return _candidate_list
diff --git a/conductor/conductor/solver/optimizer/constraints/service.py b/conductor/conductor/solver/optimizer/constraints/service.py
new file mode 100644 (file)
index 0000000..bdbe267
--- /dev/null
@@ -0,0 +1,76 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from oslo_log import log
+
+from conductor.i18n import _LE
+from conductor.solver.optimizer.constraints import constraint
+
+LOG = log.getLogger(__name__)
+
+
class Service(constraint.Constraint):
    """Constraint that vets candidates through an external service check."""

    def __init__(self, _name, _type, _demand_list, _priority=0,
                 _controller=None, _request=None, _cost=None,
                 _inventory_type=None):
        """Initialize; a controller (provider URL) is mandatory."""
        constraint.Constraint.__init__(
            self, _name, _type, _demand_list, _priority)
        if _controller is None:
            LOG.debug("Provider URL not available")
            raise ValueError
        self.request = _request
        self.controller = _controller
        self.cost = _cost
        self.inventory_type = _inventory_type

    def solve(self, _decision_path, _candidate_list, _request):
        """Keep candidates that pass the service check for this type.

        Candidates whose inventory type matches this constraint's type
        ("cloud" or "service") are sent to conductor-data for checking;
        candidates of the other known type pass through unchecked.
        """
        select_list = list()
        candidates_to_check = list()
        demand_name = _decision_path.current_demand.name

        # Partition candidates: matching inventory type -> must be
        # checked; the other known type -> selected as-is.
        for candidate in _candidate_list:
            if self.inventory_type in ("cloud", "service"):
                if candidate["inventory_type"] == self.inventory_type:
                    candidates_to_check.append(candidate)
                else:
                    select_list.append(candidate)

        # call conductor data with request parameters
        if candidates_to_check:
            cei = _request.cei
            filtered_list = cei.get_candidates_from_service(
                self.name, self.constraint_type, candidates_to_check,
                self.controller, self.inventory_type, self.request,
                self.cost, demand_name)
            select_list.extend(filtered_list)
        else:
            LOG.error(_LE("Constraint {} ({}) has no candidates of "
                          "inventory type {} for demand {}").format(
                self.name, self.constraint_type,
                self.inventory_type, demand_name)
            )

        _candidate_list[:] = [c for c in _candidate_list if c in select_list]
        return _candidate_list
diff --git a/conductor/conductor/solver/optimizer/constraints/zone.py b/conductor/conductor/solver/optimizer/constraints/zone.py
new file mode 100755 (executable)
index 0000000..c7a968f
--- /dev/null
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+import operator
+from oslo_log import log
+
+from constraint import Constraint
+
+LOG = log.getLogger(__name__)
+
+
class Zone(Constraint):
    """Constraint on zone co-location of candidates across demands."""

    def __init__(self, _name, _type, _demand_list, _priority=0,
                 _qualifier=None, _category=None):
        """Initialize with a qualifier ("same"/"different") and category."""
        Constraint.__init__(self, _name, _type, _demand_list, _priority)

        self.qualifier = _qualifier  # different or same
        self.category = _category  # disaster, region, or update

        # Map the qualifier onto the operator used to compare zones.
        if self.qualifier == "same":
            self.comparison_operator = operator.eq
        elif self.qualifier == "different":
            self.comparison_operator = operator.ne
        else:
            self.comparison_operator = None

    def solve(self, _decision_path, _candidate_list, _request):
        """Drop candidates whose zone conflicts with prior decisions."""
        # Decisions already made for the demands this constraint covers.
        prior_decisions = [
            _decision_path.decisions[d]
            for d in self.demand_list
            if d in _decision_path.decisions
        ]

        surviving = []
        for candidate in _candidate_list:
            cei = _request.cei
            candidate_zone = cei.get_candidate_zone(candidate, self.category)
            # The candidate survives only if its zone compares favorably
            # (per the qualifier) against every prior decision.
            satisfies_all = True
            for chosen in prior_decisions:
                chosen_zone = cei.get_candidate_zone(chosen, self.category)
                if not self.comparison_operator(candidate_zone, chosen_zone):
                    satisfies_all = False
            if satisfies_all:
                surviving.append(candidate)

        _candidate_list[:] = surviving

        return _candidate_list
diff --git a/conductor/conductor/solver/optimizer/decision_path.py b/conductor/conductor/solver/optimizer/decision_path.py
new file mode 100755 (executable)
index 0000000..0890f52
--- /dev/null
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+import copy
+
+
class DecisionPath(object):
    """One path of demand -> candidate decisions explored by a search."""

    def __init__(self):
        # Decisions made so far: key = demand.name,
        # value = chosen region or service instance.
        self.decisions = None

        # Identifier of this decision path within the search.
        self.decision_id = ""

        # Demand currently being resolved.
        self.current_demand = None

        # Accumulated metrics along this path.
        self.cumulated_value = 0.0
        self.cumulated_cost = 0.0
        self.heuristic_to_go_value = 0.0
        self.heuristic_to_go_cost = 0.0
        # cumulated_value + heuristic_to_go_value (if exist)
        self.total_value = 0.0
        # cumulated_cost + heuristic_to_go_cost (if exist)
        self.total_cost = 0.0

    def set_decisions(self, _prior_decisions):
        """Deep-copy prior decisions so this path can diverge safely."""
        self.decisions = copy.deepcopy(_prior_decisions)

    def set_decision_id(self, _dk, _rk):
        """Append a demand:candidate marker to this path's identifier."""
        self.decision_id += "{}:{}>".format(_dk, _rk)
diff --git a/conductor/conductor/solver/optimizer/fit_first.py b/conductor/conductor/solver/optimizer/fit_first.py
new file mode 100755 (executable)
index 0000000..42d8fed
--- /dev/null
@@ -0,0 +1,160 @@
+#!/bin/python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+from oslo_log import log
+import sys
+
+from conductor.solver.optimizer import decision_path as dpath
+from conductor.solver.optimizer import search
+
+LOG = log.getLogger(__name__)
+
+
class FitFirst(search.Search):
    """Backtracking search that fits the best candidate per demand.

    Demands are resolved one at a time; for each demand the candidate
    that best satisfies the objective is chosen and the search recurses
    on the remaining demands, rolling back when a dead end is reached.
    """

    def __init__(self, conf):
        search.Search.__init__(self, conf)

    def search(self, _demand_list, _objective, _request):
        """Return a complete DecisionPath for _demand_list, or None."""
        decision_path = dpath.DecisionPath()
        decision_path.set_decisions({})

        # Begin the recursive search
        return self._find_current_best(
            _demand_list, _objective, decision_path, _request)

    def _find_current_best(self, _demand_list, _objective,
                           _decision_path, _request):
        """Recursively resolve the next demand along _decision_path.

        Returns the completed decision path, or None when no candidate
        of the current demand leads to a solution (which triggers a
        rollback in the caller).
        """
        # _demand_list is common across all recursions
        if len(_demand_list) == 0:
            LOG.debug("search done")
            return _decision_path

        # get next demand to resolve
        demand = _demand_list.pop(0)
        LOG.debug("demand = {}".format(demand.name))
        _decision_path.current_demand = demand

        # call constraints to whittle initial candidates
        # candidate_list meets all constraints for the demand
        candidate_list = self._solve_constraints(_decision_path, _request)

        # find the best candidate among the list

        # bound_value keeps track of the max value discovered
        # thus far for the _decision_path. For every demand
        # added to the _decision_path bound_value will be set
        # to a really large value to begin with
        bound_value = 0.0
        version_value = "0.0"

        if "min" in _objective.goal:
            bound_value = sys.float_info.max

        # Start recursive search
        while True:
            best_resource = None
            # Find best candidate that optimizes the cost for demand.
            # The candidate list can be empty if the constraints
            # rule out all candidates
            for candidate in candidate_list:
                _decision_path.decisions[demand.name] = candidate
                _objective.compute(_decision_path, _request)
                # this will set the total_value of the _decision_path
                # thus far up to the demand
                if _objective.goal is None:
                    best_resource = candidate

                elif _objective.goal == "min_cloud_version":
                    # convert the unicode to string
                    # NOTE(review): .encode('utf-8') returns bytes on
                    # Python 3; this assumes Python 2 str semantics --
                    # confirm before porting.
                    candidate_version = candidate \
                        .get("cloud_region_version").encode('utf-8')
                    # Prefer a lower total value; break ties by
                    # preferring the higher cloud region version.
                    if _decision_path.total_value < bound_value or \
                       (_decision_path.total_value == bound_value and
                       self._compare_version(candidate_version,
                                             version_value) > 0):
                        bound_value = _decision_path.total_value
                        version_value = candidate_version
                        best_resource = candidate

                elif _objective.goal == "min":
                    # if the path value is less than bound value
                    # we have found the better candidate
                    if _decision_path.total_value < bound_value:
                        # relax the bound_value to the value of
                        # the path - this will ensure a future
                        # candidate will be picked only if it has
                        # a value lesser than the current best candidate
                        bound_value = _decision_path.total_value
                        best_resource = candidate

            # Rollback if we don't have any candidate picked for
            # the demand.
            if best_resource is None:
                LOG.debug("no resource, rollback")
                # Put the current demand (which failed to find a
                # candidate) back in the list so that it can be picked
                # up in the next iteration of the recursion
                _demand_list.insert(0, demand)
                return None  # return None back to the recursion
            else:
                # best resource is found, add to the decision path
                _decision_path.decisions[demand.name] = best_resource
                _decision_path.total_value = bound_value

                # Begin the next recursive call to find candidate
                # for the next demand in the list
                decision_path = self._find_current_best(
                    _demand_list, _objective, _decision_path, _request)

                # The point of return from the previous recursion.
                # If the call returns no candidates, no solution exists
                # in that path of the decision tree. Rollback the
                # current best_resource and remove it from the list
                # of potential candidates.
                if decision_path is None:
                    candidate_list.remove(best_resource)
                    # reset bound_value to a large value so that
                    # the next iteration of the current recursion
                    # will pick the next best candidate, which
                    # will have a value larger than the current
                    # bound_value (proof by contradiction:
                    # it cannot have a smaller value, if it wasn't
                    # the best_resource.
                    if _objective.goal == "min":
                        bound_value = sys.float_info.max
                else:
                    # A candidate was found for the demand, and
                    # was added to the decision path. Return current
                    # path back to the recursion.
                    return decision_path

    def _compare_version(self, version1, version2):
        """Compare dotted versions: 1 if v1 > v2, -1 if v1 < v2, else 0.

        Missing components are treated as 0 (so "1.2" == "1.2.0").
        """
        version1 = version1.split('.')
        version2 = version2.split('.')
        for i in range(max(len(version1), len(version2))):
            v1 = int(version1[i]) if i < len(version1) else 0
            v2 = int(version2[i]) if i < len(version2) else 0
            if v1 > v2:
                return 1
            elif v1 < v2:
                return -1
        return 0
diff --git a/conductor/conductor/solver/optimizer/greedy.py b/conductor/conductor/solver/optimizer/greedy.py
new file mode 100755 (executable)
index 0000000..eae1b12
--- /dev/null
@@ -0,0 +1,65 @@
+#!/bin/python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+from oslo_log import log
+import sys
+
+from conductor.solver.optimizer import decision_path as dpath
+from conductor.solver.optimizer import search
+
+LOG = log.getLogger(__name__)
+
+
class Greedy(search.Search):
    """Greedy search: pick the best candidate per demand, no backtracking.

    Unlike FitFirst this strategy never rolls back; if any demand ends
    up with no viable candidate the whole search fails (returns None).
    """

    def __init__(self, conf):
        search.Search.__init__(self, conf)

    def search(self, _demand_list, _objective, _request=None):
        """Return a DecisionPath covering all demands, or None.

        :param _request: request context forwarded to constraint
            solving; optional so callers using the original
            two-argument signature keep working.
        """
        decision_path = dpath.DecisionPath()
        decision_path.set_decisions({})

        for demand in _demand_list:
            LOG.debug("demand = {}".format(demand.name))

            decision_path.current_demand = demand
            # Bug fix: Search._solve_constraints() takes the request as
            # its second argument; the original call omitted it and
            # would raise TypeError at runtime.
            candidate_list = self._solve_constraints(decision_path,
                                                     _request)

            # For a "min" goal start from the largest float so any real
            # candidate value becomes the new bound.
            bound_value = 0.0
            if _objective.goal == "min":
                bound_value = sys.float_info.max

            best_resource = None
            for candidate in candidate_list:
                decision_path.decisions[demand.name] = candidate
                # NOTE(review): FitFirst calls compute() with the
                # request as a second argument -- confirm the expected
                # arity of the objective's compute().
                _objective.compute(decision_path)
                if _objective.goal == "min":
                    if decision_path.total_value < bound_value:
                        bound_value = decision_path.total_value
                        best_resource = candidate

            if best_resource is not None:
                decision_path.decisions[demand.name] = best_resource
                decision_path.total_value = bound_value
            else:
                # No candidate survived for this demand; greedy cannot
                # recover, so the whole search fails.
                return None

        return decision_path
diff --git a/conductor/conductor/solver/optimizer/optimizer.py b/conductor/conductor/solver/optimizer/optimizer.py
new file mode 100755 (executable)
index 0000000..c7155c4
--- /dev/null
@@ -0,0 +1,196 @@
+#!/bin/python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+from oslo_config import cfg
+from oslo_log import log
+import time
+
+from conductor import service
+# from conductor.solver.optimizer import decision_path as dpath
+# from conductor.solver.optimizer import best_first
+# from conductor.solver.optimizer import greedy
+from conductor.solver.optimizer import fit_first
+from conductor.solver.optimizer import random_pick
+from conductor.solver.request import demand
+
+LOG = log.getLogger(__name__)
+
+CONF = cfg.CONF
+
+SOLVER_OPTS = [
+
+]
+
+CONF.register_opts(SOLVER_OPTS, group='solver')
+
+
class Optimizer(object):
    """Top-level solver driver: sorts demands, then runs a search strategy."""

    # FIXME(gjung): _requests should be request (no underscore, one item)
    def __init__(self, conf, _requests=None):
        self.conf = conf

        # self.search = greedy.Greedy(self.conf)
        self.search = None
        # self.search = best_first.BestFirst(self.conf)

        if _requests is not None:
            self.requests = _requests

    def get_solution(self):
        """Solve and return the best decision path (or None).

        NOTE: the return statement sits inside the loop body, so only
        the first request in self.requests is ever processed --
        consistent with the FIXME above (self.requests is effectively a
        single-item mapping).
        """
        LOG.debug("search start")

        for rk in self.requests:
            request = self.requests[rk]
            LOG.debug("--- request = {}".format(rk))

            LOG.debug("1. sort demands")
            demand_list = self._sort_demands(request)

            for d in demand_list:
                LOG.debug("    demand = {}".format(d.name))

            LOG.debug("2. search")
            st = time.time()

            # No objective -> any feasible assignment is acceptable,
            # so pick candidates at random; otherwise use fit-first.
            if not request.objective.goal:
                LOG.debug("No objective function is provided. "
                          "Random pick algorithm is used")
                self.search = random_pick.RandomPick(self.conf)
                best_path = self.search.search(demand_list, request)
            else:
                LOG.debug("Fit first algorithm is used")
                self.search = fit_first.FitFirst(self.conf)
                best_path = self.search.search(demand_list,
                                               request.objective, request)

            if best_path is not None:
                self.search.print_decisions(best_path)
            else:
                LOG.debug("no solution found")
            LOG.debug("search delay = {} sec".format(time.time() - st))
            return best_path

    def _sort_demands(self, _request):
        """Order demands so dependency-linked ones are resolved together.

        A demand's sort_base flag is set to 1 once it has been queued,
        which prevents the same demand from being scheduled twice.
        """
        demand_list = []

        # first, find loc-demand dependencies
        # using constraints and objective functions
        open_demand_list = []
        for key in _request.constraints:
            c = _request.constraints[key]
            if c.constraint_type == "distance_to_location":
                for dk in c.demand_list:
                    if _request.demands[dk].sort_base != 1:
                        _request.demands[dk].sort_base = 1
                        open_demand_list.append(_request.demands[dk])
        for op in _request.objective.operand_list:
            if op.function.func_type == "distance_between":
                # One side of the pair is a fixed Location: queue the
                # demand on the other side.
                if isinstance(op.function.loc_a, demand.Location):
                    if _request.demands[op.function.loc_z.name].sort_base != 1:
                        _request.demands[op.function.loc_z.name].sort_base = 1
                        open_demand_list.append(op.function.loc_z)
                elif isinstance(op.function.loc_z, demand.Location):
                    if _request.demands[op.function.loc_a.name].sort_base != 1:
                        _request.demands[op.function.loc_a.name].sort_base = 1
                        open_demand_list.append(op.function.loc_a)

        # No location-anchored demand found: seed with any unsorted demand.
        if len(open_demand_list) == 0:
            init_demand = self._exist_not_sorted_demand(_request.demands)
            open_demand_list.append(init_demand)

        # second, find demand-demand dependencies
        while True:
            d_list = self._get_depended_demands(open_demand_list, _request)
            for d in d_list:
                demand_list.append(d)

            init_demand = self._exist_not_sorted_demand(_request.demands)
            if init_demand is None:
                break
            open_demand_list.append(init_demand)

        return demand_list

    def _get_depended_demands(self, _open_demand_list, _request):
        """Drain _open_demand_list, pulling in demands linked to each
        popped demand via distance constraints or the objective function.
        """
        demand_list = []

        while True:
            if len(_open_demand_list) == 0:
                break

            d = _open_demand_list.pop(0)
            if d.sort_base != 1:
                d.sort_base = 1
            demand_list.append(d)

            # Demands tied to d by a distance_between_demands constraint.
            for key in _request.constraints:
                c = _request.constraints[key]
                if c.constraint_type == "distance_between_demands":
                    if d.name in c.demand_list:
                        for dk in c.demand_list:
                            if dk != d.name and \
                                    _request.demands[dk].sort_base != 1:
                                _request.demands[dk].sort_base = 1
                                _open_demand_list.append(
                                    _request.demands[dk])

            # Demands tied to d through the objective's distance terms.
            for op in _request.objective.operand_list:
                if op.function.func_type == "distance_between":
                    if op.function.loc_a.name == d.name:
                        if op.function.loc_z.name in \
                                _request.demands.keys():
                            if _request.demands[
                                    op.function.loc_z.name].sort_base != 1:
                                _request.demands[
                                    op.function.loc_z.name].sort_base = 1
                                _open_demand_list.append(op.function.loc_z)
                    elif op.function.loc_z.name == d.name:
                        if op.function.loc_a.name in \
                                _request.demands.keys():
                            if _request.demands[
                                    op.function.loc_a.name].sort_base != 1:
                                _request.demands[
                                    op.function.loc_a.name].sort_base = 1
                                _open_demand_list.append(op.function.loc_a)

        return demand_list

    def _exist_not_sorted_demand(self, _demands):
        """Return the first demand not yet sorted (sort_base != 1), or None.

        NOTE: the local name 'demand' shadows the module-level import
        'demand' inside this method only.
        """
        not_sorted_demand = None
        for key in _demands:
            demand = _demands[key]
            if demand.sort_base != 1:
                not_sorted_demand = demand
                break
        return not_sorted_demand
+
+
# Used for ad-hoc local testing; point this at a conductor
# configuration file before running this module directly.
CONFIG_FILE = ''

''' for unit test '''
if __name__ == "__main__":
    # Prepare service-wide components (e.g., config)
    conf = service.prepare_service([], config_files=[CONFIG_FILE])

    opt = Optimizer(conf)
    opt.get_solution()
diff --git a/conductor/conductor/solver/optimizer/random_pick.py b/conductor/conductor/solver/optimizer/random_pick.py
new file mode 100644 (file)
index 0000000..2896757
--- /dev/null
@@ -0,0 +1,43 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+from oslo_log import log
+
+from conductor.solver.optimizer import decision_path as dpath
+from conductor.solver.optimizer import search
+from random import randint
+
+LOG = log.getLogger(__name__)
+
+
class RandomPick(search.Search):
    """Search strategy assigning a uniformly random candidate per demand.

    Used when the request carries no objective function, so any
    feasible candidate is as good as any other.
    """

    def __init__(self, conf):
        search.Search.__init__(self, conf)

    def search(self, _demand_list, _request):
        """Return a decision path with one random resource per demand."""
        decision_path = dpath.DecisionPath()
        decision_path.set_decisions({})
        return self._find_current_best(_demand_list, decision_path, _request)

    def _find_current_best(self, _demand_list, _decision_path, _request):
        for demand in _demand_list:
            # Bug fix: materialize the keys first -- dict.keys()[i] is
            # not subscriptable on Python 3 (dict views).
            resource_keys = list(demand.resources)
            r_index = randint(0, len(resource_keys) - 1)
            best_resource = demand.resources[resource_keys[r_index]]
            _decision_path.decisions[demand.name] = best_resource
        return _decision_path
diff --git a/conductor/conductor/solver/optimizer/search.py b/conductor/conductor/solver/optimizer/search.py
new file mode 100755 (executable)
index 0000000..9d138e4
--- /dev/null
@@ -0,0 +1,90 @@
+#!/bin/python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+from operator import itemgetter
+from oslo_log import log
+
+from conductor.solver.optimizer import decision_path as dpath
+
+LOG = log.getLogger(__name__)
+
+
class Search(object):
    """Base class for solver search strategies.

    Concrete strategies (greedy, best-first, random, ...) override
    search(); the constraint-solving and cost helpers here are shared.
    """

    def __init__(self, conf):
        # configuration handle shared by all concrete strategies
        self.conf = conf

    def search(self, _demand_list, _objective):
        """Return an empty decision path; subclasses implement the search."""
        path = dpath.DecisionPath()
        path.set_decisions({})
        return path

    def _solve_constraints(self, _decision_path, _request):
        """Filter the current demand's candidates through its constraints.

        Returns the surviving candidates, cost-annotated and sorted, or an
        empty list as soon as any constraint eliminates every candidate.
        """
        current = _decision_path.current_demand
        candidates = [current.resources[key] for key in current.resources]

        for constraint in current.constraint_list:
            LOG.debug("Evaluating constraint = {}".format(constraint.name))
            LOG.debug("Available candidates before solving "
                      "constraint {}".format(candidates))

            candidates = constraint.solve(_decision_path, candidates, _request)
            LOG.debug("Available candidates after solving "
                      "constraint {}".format(candidates))
            if not candidates:
                LOG.error("No candidates found for demand {} "
                          "when constraint {} was evaluated "
                          "".format(current, constraint.name)
                          )
                break

        if candidates:
            self._set_candidate_cost(candidates)

        return candidates

    def _set_candidate_cost(self, _candidate_list):
        """Annotate candidates with a string cost and sort them by it.

        Service candidates cost "1", everything else "2", so existing
        service instances sort ahead of new cloud placements.
        """
        for candidate in _candidate_list:
            is_service = candidate["inventory_type"] == "service"
            candidate["cost"] = "1" if is_service else "2"
        _candidate_list.sort(key=itemgetter("cost"))

    def print_decisions(self, _best_path):
        """Log the chosen resource per demand plus decision totals."""
        if not _best_path:
            return
        line = "--- demand = {}, chosen resource = {} at {}"
        for demand_name, resource in _best_path.decisions.items():
            LOG.debug(line.format(demand_name, resource["candidate_id"],
                                  resource["location_id"]))

        LOG.debug(
            "--- total value of decision = {}".format(_best_path.total_value))
        LOG.debug(
            "--- total cost of decision = {}".format(_best_path.total_cost))
diff --git a/conductor/conductor/solver/request/__init__.py b/conductor/conductor/solver/request/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/request/demand.py b/conductor/conductor/solver/request/demand.py
new file mode 100755 (executable)
index 0000000..5554cfe
--- /dev/null
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+
class Demand(object):
    """A placement demand to be resolved to a single candidate resource."""

    def __init__(self, _name=None):
        self.name = _name
        # initial candidate pool for this demand:
        # region_id (or service_id) -> region (or service) instance
        self.resources = {}
        # constraint instances that must hold for this demand
        self.constraint_list = []
        # ordering key used when demands are sorted during optimization
        self.sort_base = -1
+
+
class Location(object):
    """A named reference location used by constraints and the objective."""

    def __init__(self, _name=None):
        self.name = _name
        # one of: clli, coordinates, or placemark
        self.loc_type = None
        # representation depends on loc_type (e.g. a (lat, lon) tuple
        # for "coordinates")
        self.value = None
diff --git a/conductor/conductor/solver/request/functions/__init__.py b/conductor/conductor/solver/request/functions/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/request/functions/cloud_version.py b/conductor/conductor/solver/request/functions/cloud_version.py
new file mode 100644 (file)
index 0000000..564468b
--- /dev/null
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+
class CloudVersion(object):
    """Objective-function term used for cloud-version based goals."""

    def __init__(self, _type):
        # function type tag, e.g. "cloud_version"
        self.func_type = _type
        # function parameter; populated later by the request parser
        self.loc = None
diff --git a/conductor/conductor/solver/request/functions/distance_between.py b/conductor/conductor/solver/request/functions/distance_between.py
new file mode 100755 (executable)
index 0000000..8cf3f86
--- /dev/null
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+
+from conductor.solver.utils import utils
+
+
class DistanceBetween(object):
    """Objective-function term: air distance between two endpoints."""

    def __init__(self, _type):
        # function type tag, e.g. "distance_between"
        self.func_type = _type
        # endpoints; populated later by the request parser
        self.loc_a = None
        self.loc_z = None

    def compute(self, _loc_a, _loc_z):
        """Return the air distance between the two given locations."""
        return utils.compute_air_distance(_loc_a, _loc_z)
diff --git a/conductor/conductor/solver/request/objective.py b/conductor/conductor/solver/request/objective.py
new file mode 100755 (executable)
index 0000000..ca1e614
--- /dev/null
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+from conductor.solver.request import demand
+# from conductor.solver.resource import region
+# from conductor.solver.resource import service
+
+
class Objective(object):
    """Objective function evaluated over a list of operands."""

    def __init__(self):
        self.goal = None
        # currently only "sum" is evaluated by compute()
        self.operation = None
        self.operand_list = []

    def compute(self, _decision_path, _request):
        """Evaluate the objective and store the result on the decision path.

        Sets cumulated_value to the sum of all operand values, and
        total_value to that plus the path's heuristic-to-go estimate.
        """
        total = 0.0
        if self.operation == "sum":
            for operand in self.operand_list:
                total += operand.compute(_decision_path, _request)

        _decision_path.cumulated_value = total
        _decision_path.total_value = (
            total + _decision_path.heuristic_to_go_value)
+
+
class Operand(object):
    """One weighted term of the objective function."""

    def __init__(self):
        # "product" multiplies the computed value by self.weight
        self.operation = None
        self.weight = 0
        # function object, e.g. a DistanceBetween instance
        self.function = None

    def compute(self, _decision_path, _request):
        """Evaluate this operand against the decisions made so far.

        Only "distance_between" functions are handled.  Each endpoint
        (loc_a / loc_z) is either a fixed Location or a demand; a demand
        endpoint contributes only once a decision exists for it, using the
        chosen candidate's location resolved via the constraint engine
        interface (cei).  Returns 0.0 when no decision is available yet.
        """
        value = 0.0
        cei = _request.cei
        if self.function.func_type == "distance_between":
            # Case 1: loc_a is a fixed Location, loc_z refers to a demand.
            if isinstance(self.function.loc_a, demand.Location):
                if self.function.loc_z.name in \
                        _decision_path.decisions.keys():
                    resource = \
                        _decision_path.decisions[self.function.loc_z.name]
                    loc = None
                    # if isinstance(resource, region.Region):
                    #     loc = resource.location
                    # elif isinstance(resource, service.Service):
                    #     loc = resource.region.location
                    loc = cei.get_candidate_location(resource)
                    value = \
                        self.function.compute(self.function.loc_a.value, loc)
            # Case 2: loc_z is a fixed Location, loc_a refers to a demand.
            elif isinstance(self.function.loc_z, demand.Location):
                if self.function.loc_a.name in \
                        _decision_path.decisions.keys():
                    resource = \
                        _decision_path.decisions[self.function.loc_a.name]
                    loc = None
                    # if isinstance(resource, region.Region):
                    #    loc = resource.location
                    # elif isinstance(resource, service.Service):
                    #    loc = resource.region.location
                    loc = cei.get_candidate_location(resource)
                    value = \
                        self.function.compute(self.function.loc_z.value, loc)
            # Case 3: both endpoints are demands; both decisions must exist.
            else:
                if self.function.loc_a.name in \
                        _decision_path.decisions.keys() and \
                   self.function.loc_z.name in \
                        _decision_path.decisions.keys():
                    resource_a = \
                        _decision_path.decisions[self.function.loc_a.name]
                    loc_a = None
                    # if isinstance(resource_a, region.Region):
                    #     loc_a = resource_a.location
                    # elif isinstance(resource_a, service.Service):
                    #     loc_a = resource_a.region.location
                    loc_a = cei.get_candidate_location(resource_a)
                    resource_z = \
                        _decision_path.decisions[self.function.loc_z.name]
                    loc_z = None
                    # if isinstance(resource_z, region.Region):
                    #     loc_z = resource_z.location
                    # elif isinstance(resource_z, service.Service):
                    #     loc_z = resource_z.region.location
                    loc_z = cei.get_candidate_location(resource_z)

                    value = self.function.compute(loc_a, loc_z)

        # apply the configured weight to the raw function value
        if self.operation == "product":
            value *= self.weight

        return value
diff --git a/conductor/conductor/solver/request/parser.py b/conductor/conductor/solver/request/parser.py
new file mode 100755 (executable)
index 0000000..6e30549
--- /dev/null
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+# import json
+import operator
+from oslo_log import log
+import random
+# import sys
+
+from conductor.solver.optimizer.constraints \
+    import access_distance as access_dist
+from conductor.solver.optimizer.constraints \
+    import cloud_distance as cloud_dist
+from conductor.solver.optimizer.constraints \
+    import attribute as attribute_constraint
+# from conductor.solver.optimizer.constraints import constraint
+from conductor.solver.optimizer.constraints \
+    import inventory_group
+from conductor.solver.optimizer.constraints \
+    import service as service_constraint
+from conductor.solver.optimizer.constraints import zone
+from conductor.solver.request import demand
+from conductor.solver.request.functions import cloud_version
+from conductor.solver.request.functions import distance_between
+from conductor.solver.request import objective
+
+# from conductor.solver.request.functions import distance_between
+# from conductor.solver.request import objective
+# from conductor.solver.resource import region
+# from conductor.solver.resource import service
+# from conductor.solver.utils import constraint_engine_interface as cei
+# from conductor.solver.utils import utils
+
+LOG = log.getLogger(__name__)
+
+
+# FIXME(snarayanan): This is really a SolverRequest (or Request) object
class Parser(object):
    """Parses a "conductor_solver" JSON template into solver objects.

    parse_template() populates self.demands, self.locations,
    self.constraints, and self.objective; map_constraints_to_demands()
    then attaches each constraint to the demands it governs.
    """

    # JSON comparison symbol -> comparison function; operator.le is the
    # default when the symbol is missing or unrecognized.
    _OPERATOR_MAP = {
        ">": operator.gt,
        ">=": operator.ge,
        "<": operator.lt,
        "<=": operator.le,
        "=": operator.eq,
    }

    def __init__(self, _region_gen=None):
        self.demands = {}
        self.locations = {}
        self.region_gen = _region_gen
        self.constraints = {}
        self.objective = None
        self.cei = None
        self.request_id = None

    # def get_data_engine_interface(self):
    #    self.cei = cei.ConstraintEngineInterface()

    @classmethod
    def _comparison_operator(cls, c_op):
        """Return the comparison function for a JSON operator symbol.

        Shared by the distance constraint branches; previously this
        mapping was duplicated inline in each branch.
        """
        return cls._OPERATOR_MAP.get(c_op, operator.le)

    # FIXME(snarayanan): This should just be parse_template
    def parse_template(self, json_template=None):
        """Populate demands, locations, constraints and the objective.

        Returns "Error" when no template is given; returns None early if
        an unknown constraint type is encountered (after logging it).
        """
        if json_template is None:
            LOG.error("No template specified")
            return "Error"

        # get demands
        demand_list = json_template["conductor_solver"]["demands"]
        for demand_id, candidate_list in demand_list.items():
            current_demand = demand.Demand(demand_id)
            # candidate should only have minimal information like location_id
            for candidate in candidate_list["candidates"]:
                candidate_id = candidate["candidate_id"]
                current_demand.resources[candidate_id] = candidate
            current_demand.sort_base = 0  # this is only for testing
            self.demands[demand_id] = current_demand

        # get locations (stored as coordinate pairs)
        location_list = json_template["conductor_solver"]["locations"]
        for location_id, location_info in location_list.items():
            loc = demand.Location(location_id)
            loc.loc_type = "coordinates"
            loc.value = (float(location_info["latitude"]),
                         float(location_info["longitude"]))
            self.locations[location_id] = loc

        # get constraints
        input_constraints = json_template["conductor_solver"]["constraints"]
        for constraint_id, constraint_info in input_constraints.items():
            constraint_type = constraint_info["type"]
            # "demands" may be a single name or a list of names
            parsed_demands = constraint_info["demands"]
            if isinstance(parsed_demands, list):
                constraint_demands = list(parsed_demands)
            else:
                constraint_demands = [parsed_demands]
            if constraint_type == "distance_to_location":
                c_property = constraint_info.get("properties")
                location_id = c_property.get("location")
                op = self._comparison_operator(
                    c_property.get("distance").get("operator"))
                dist_value = c_property.get("distance").get("value")
                my_access_distance_constraint = access_dist.AccessDistance(
                    constraint_id, constraint_type, constraint_demands,
                    _comparison_operator=op, _threshold=dist_value,
                    _location=self.locations[location_id])
                self.constraints[my_access_distance_constraint.name] = \
                    my_access_distance_constraint
            elif constraint_type == "distance_between_demands":
                c_property = constraint_info.get("properties")
                op = self._comparison_operator(
                    c_property.get("distance").get("operator"))
                dist_value = c_property.get("distance").get("value")
                my_cloud_distance_constraint = cloud_dist.CloudDistance(
                    constraint_id, constraint_type, constraint_demands,
                    _comparison_operator=op, _threshold=dist_value)
                self.constraints[my_cloud_distance_constraint.name] = \
                    my_cloud_distance_constraint
            elif constraint_type == "inventory_group":
                my_inventory_group_constraint = \
                    inventory_group.InventoryGroup(
                        constraint_id, constraint_type, constraint_demands)
                self.constraints[my_inventory_group_constraint.name] = \
                    my_inventory_group_constraint
            elif constraint_type == "region_fit":
                c_property = constraint_info.get("properties")
                controller = c_property.get("controller")
                request = c_property.get("request")
                # inventory type is cloud for region_fit
                my_service_constraint = service_constraint.Service(
                    constraint_id, constraint_type, constraint_demands,
                    _controller=controller, _request=request, _cost=None,
                    _inventory_type="cloud")
                self.constraints[my_service_constraint.name] = \
                    my_service_constraint
            elif constraint_type == "instance_fit":
                c_property = constraint_info.get("properties")
                controller = c_property.get("controller")
                request = c_property.get("request")
                # inventory type is service for instance_fit
                my_service_constraint = service_constraint.Service(
                    constraint_id, constraint_type, constraint_demands,
                    _controller=controller, _request=request, _cost=None,
                    _inventory_type="service")
                self.constraints[my_service_constraint.name] = \
                    my_service_constraint
            elif constraint_type == "zone":
                c_property = constraint_info.get("properties")
                my_zone_constraint = zone.Zone(
                    constraint_id, constraint_type, constraint_demands,
                    _qualifier=c_property.get("qualifier"),
                    _category=c_property.get("category"))
                self.constraints[my_zone_constraint.name] = my_zone_constraint
            elif constraint_type == "attribute":
                c_property = constraint_info.get("properties")
                my_attribute_constraint = \
                    attribute_constraint.Attribute(constraint_id,
                                                   constraint_type,
                                                   constraint_demands,
                                                   _properties=c_property)
                self.constraints[my_attribute_constraint.name] = \
                    my_attribute_constraint
            else:
                LOG.error("unknown constraint type {}".format(constraint_type))
                return

        # get objective function (an empty Objective when absent)
        if "objective" not in json_template["conductor_solver"]\
           or not json_template["conductor_solver"]["objective"]:
            self.objective = objective.Objective()
        else:
            input_objective = json_template["conductor_solver"]["objective"]
            self.objective = objective.Objective()
            self.objective.goal = input_objective["goal"]
            self.objective.operation = input_objective["operation"]
            for operand_data in input_objective["operands"]:
                operand = objective.Operand()
                operand.operation = operand_data["operation"]
                operand.weight = float(operand_data["weight"])
                if operand_data["function"] == "distance_between":
                    func = distance_between.DistanceBetween("distance_between")
                    # each endpoint is either a known location or a demand
                    param = operand_data["function_param"][0]
                    if param in self.locations:
                        func.loc_a = self.locations[param]
                    elif param in self.demands:
                        func.loc_a = self.demands[param]
                    param = operand_data["function_param"][1]
                    if param in self.locations:
                        func.loc_z = self.locations[param]
                    elif param in self.demands:
                        func.loc_z = self.demands[param]
                    operand.function = func
                elif operand_data["function"] == "cloud_version":
                    self.objective.goal = "min_cloud_version"
                    func = cloud_version.CloudVersion("cloud_version")
                    func.loc = operand_data["function_param"]
                    operand.function = func

                self.objective.operand_list.append(operand)

    def map_constraints_to_demands(self):
        """Attach each parsed constraint to every demand it names."""
        for constraint_name, constraint in self.constraints.items():
            for d in constraint.demand_list:
                if d in self.demands.keys():
                    self.demands[d].constraint_list.append(constraint)
+
diff --git a/conductor/conductor/solver/resource/__init__.py b/conductor/conductor/solver/resource/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/resource/region.py b/conductor/conductor/solver/resource/region.py
new file mode 100755 (executable)
index 0000000..fc42bd1
--- /dev/null
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+"""Cloud region"""
+
+
class Region(object):
    """A cloud region candidate with capacity, zones, and neighbors."""

    def __init__(self, _rid=None):
        self.name = _rid
        self.status = "active"

        # general region properties:
        # S (i.e., medium_lite), M (i.e., medium), or L (i.e., large)
        self.region_type = None
        # (latitude, longitude)
        self.location = None

        # placemark address fields: country_code (e.g., US),
        # postal_code (e.g., 07920), administrative_area (e.g., NJ),
        # sub_administrative_area (e.g., Somerset),
        # locality (e.g., Bedminster), thoroughfare (e.g., AT&T Way),
        # sub_thoroughfare (e.g., 1)
        self.address = {}

        # Zone instances (e.g., disaster and/or update)
        self.zones = {}
        self.cost = 0.0

        # abstracted resource capacity status
        self.capacity = {}

        self.allocated_demand_list = []

        # other opaque metadata such as cloud_version, sriov, etc.
        self.properties = {}

        # known neighbor regions (a list of Link instances) used
        # for constraint solving
        self.neighbor_list = []

        self.last_update = 0

    def update_capacity(self):
        """Update resource capacity after allocating a demand (stub)."""
        pass

    def get_json_summary(self):
        """Return a JSON summary for logging (stub)."""
        pass
+
+
class Zone(object):
    """A disaster or update zone grouping a set of regions."""

    def __init__(self, _zid=None):
        self.name = _zid
        # "disaster" or "update"
        self.zone_type = None
        # names of member regions
        self.region_list = []

    def get_json_summary(self):
        """Return a JSON summary for logging (stub)."""
        pass
+
+
class Link(object):
    """A link to a neighbor region with distance and QoS metrics."""

    def __init__(self, _region_name):
        self.destination_region_name = _region_name

        # metrics default to zero until measured/populated elsewhere
        self.distance = 0.0
        self.nw_distance = 0.0
        self.latency = 0.0
        self.bandwidth = 0.0

    def get_json_summary(self):
        """Return a JSON summary for logging (stub)."""
        pass
diff --git a/conductor/conductor/solver/resource/service.py b/conductor/conductor/solver/resource/service.py
new file mode 100755 (executable)
index 0000000..faedb53
--- /dev/null
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+"""Existing service instance in a region"""
+
+
class Service(object):
    """An existing service instance hosted in a region."""

    def __init__(self, _sid=None):
        self.name = _sid

        # hosting region; set externally after construction
        self.region = None

        self.status = "active"

        self.cost = 0.0

        # abstracted resource capacity status
        self.capacity = {}

        self.allocated_demand_list = []

        # other opaque metadata if necessary
        self.properties = {}

        self.last_update = 0

    def update_capacity(self):
        """Update resource capacity after allocating a demand (stub)."""
        pass

    def get_json_summary(self):
        """Return a JSON summary for logging (stub)."""
        pass
diff --git a/conductor/conductor/solver/service.py b/conductor/conductor/solver/service.py
new file mode 100644 (file)
index 0000000..60aa092
--- /dev/null
@@ -0,0 +1,307 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import cotyledon
+from oslo_config import cfg
+from oslo_log import log
+
+from conductor.common.models import plan
+from conductor.common.music import api
+from conductor.common.music import messaging as music_messaging
+from conductor.common.music.model import base
+from conductor.i18n import _LE, _LI
+from conductor import messaging
+from conductor import service
+from conductor.solver.optimizer import optimizer
+from conductor.solver.request import parser
+from conductor.solver.utils import constraint_engine_interface as cei
+
+
+# To use oslo.log in services:
+#
+# 0. Note that conductor.service.prepare_service() bootstraps this.
+#    It's set up within conductor.cmd.SERVICENAME.
+# 1. Add "from oslo_log import log"
+# 2. Also add "LOG = log.getLogger(__name__)"
+# 3. For i18n support, import appropriate shortcuts as well:
+#    "from i18n import _, _LC, _LE, _LI, _LW  # noqa"
+#    (that's for primary, critical, error, info, warning)
+# 4. Use LOG.info, LOG.warning, LOG.error, LOG.critical, LOG.debug, e.g.:
+#    "LOG.info(_LI("Something happened with {}").format(thingie))"
+# 5. Do NOT put translation wrappers around any LOG.debug text.
+# 6. Be liberal with logging, especially in the absence of unit tests!
+# 7. Calls to print() are verboten within the service proper.
+#    Logging can be redirected! (In a CLI-side script, print() is fine.)
+#
+# Usage: http://docs.openstack.org/developer/oslo.i18n/usage.html
+
+LOG = log.getLogger(__name__)
+
+# To use oslo.config in services:
+#
+# 0. Note that conductor.service.prepare_service() bootstraps this.
+#    It's set up within conductor.cmd.SERVICENAME.
+# 1. Add "from oslo_config import cfg"
+# 2. Also add "CONF = cfg.CONF"
+# 3. Set a list of locally used options (SOLVER_OPTS is fine).
+#    Choose key names thoughtfully. Be technology-agnostic, avoid TLAs, etc.
+# 4. Register, e.g. "CONF.register_opts(SOLVER_OPTS, group='solver')"
+# 5. Add file reference to opts.py (may need to use itertools.chain())
+# 6. Run tox -e genconfig to build a new config template.
+# 7. If you want to load an entire config from a CLI you can do this:
+#    "conf = service.prepare_service([], config_files=[CONFIG_FILE])"
+# 8. You can even use oslo_config from a CLI and override values on the fly,
+#    e.g., "CONF.set_override('hostnames', ['music2'], 'music_api')"
+#    (leave the third arg out to use the DEFAULT group).
+# 9. Loading a config from a CLI is optional. So long as all the options
+#    have defaults (or you override them as needed), it should all work.
+#
+# Docs: http://docs.openstack.org/developer/oslo.config/
+
CONF = cfg.CONF

# Options local to the solver service, registered under the [solver]
# config group (see the oslo.config notes above).
SOLVER_OPTS = [
    cfg.IntOpt('workers',
               default=1,
               min=1,
               help='Number of workers for solver service. '
                    'Default value is 1.'),
    cfg.BoolOpt('concurrent',
                default=False,
                help='Set to True when solver will run in active-active '
                     'mode. When set to False, solver will restart any '
                     'orphaned solving requests at startup.'),
]

CONF.register_opts(SOLVER_OPTS, group='solver')

# Pull in service opts. We use them here.
OPTS = service.OPTS
CONF.register_opts(OPTS)
+
+
class SolverServiceLauncher(object):
    """Launcher for the solver service."""

    def __init__(self, conf):
        """Set up Music access and the keyspace-bound Plan model.

        :param conf: oslo.config configuration object
        :raises RuntimeError: if the dynamic Plan model cannot be created
        """
        self.conf = conf

        # Set up Music access.
        self.music = api.API()
        self.music.keyspace_create(keyspace=conf.keyspace)

        # Dynamically create a plan class for the specified keyspace
        self.Plan = base.create_dynamic_model(
            keyspace=conf.keyspace, baseclass=plan.Plan, classname="Plan")

        if not self.Plan:
            # A bare "raise" here had no active exception to re-raise and
            # would itself fail with an uninformative "No active exception
            # to re-raise" RuntimeError; raise a descriptive one instead.
            raise RuntimeError(
                "Unable to create Plan model for keyspace {}".format(
                    conf.keyspace))

    def run(self):
        """Run the solver service under cotyledon with configured workers."""
        kwargs = {'plan_class': self.Plan}
        svcmgr = cotyledon.ServiceManager()
        svcmgr.add(SolverService,
                   workers=self.conf.solver.workers,
                   args=(self.conf,), kwargs=kwargs)
        svcmgr.run()
+
+
class SolverService(cotyledon.Service):
    """Solver service.

    Polls Music for plans in the TRANSLATED state, runs the optimizer
    over each one, and writes the resulting recommendations (or an
    error/not-found status) back to the plan.
    """

    # This will appear in 'ps xaf'
    name = "Conductor Solver"

    def __init__(self, worker_id, conf, **kwargs):
        """Initializer"""
        LOG.debug("%s" % self.__class__.__name__)
        super(SolverService, self).__init__(worker_id)
        self._init(conf, **kwargs)
        self.running = True

    def _init(self, conf, **kwargs):
        """Set up the necessary ingredients."""
        self.conf = conf
        self.kwargs = kwargs

        self.Plan = kwargs.get('plan_class')

        # Set up the RPC service(s) we want to talk to.
        self.data_service = self.setup_rpc(conf, "data")

        # Set up the cei and optimizer
        self.cei = cei.ConstraintEngineInterface(self.data_service)
        # self.optimizer = optimizer.Optimizer(conf)

        # Set up Music access.
        self.music = api.API()

        # When not running active-active, reclaim plans left in the
        # SOLVING state by a previous (crashed or stopped) instance.
        if not self.conf.solver.concurrent:
            self._reset_solving_status()

    def _gracefully_stop(self):
        """Gracefully stop working on things"""
        pass

    def _reset_solving_status(self):
        """Reset plans being solved so they are solved again.

        Use this only when the solver service is not running concurrently.
        """
        plans = self.Plan.query.all()
        for the_plan in plans:
            if the_plan.status == self.Plan.SOLVING:
                the_plan.status = self.Plan.TRANSLATED
                the_plan.update()

    def _restart(self):
        """Prepare to restart the service"""
        pass

    def setup_rpc(self, conf, topic):
        """Set up and return the RPC Client for the given topic."""
        # TODO(jdandrea): Put this pattern inside music_messaging?
        transport = messaging.get_transport(conf=conf)
        target = music_messaging.Target(topic=topic)
        client = music_messaging.RPCClient(conf=conf,
                                           transport=transport,
                                           target=target)
        return client

    def run(self):
        """Control loop: solve TRANSLATED plans until terminated."""
        LOG.debug("%s" % self.__class__.__name__)
        # TODO(snarayanan): This is really meant to be a control loop
        # As long as self.running is true, we process another request.
        while self.running:
            # plans = Plan.query().all()
            # Find the first plan with a status of TRANSLATED.
            # Change its status to SOLVING.
            # Then, read the "translated" field as "template".
            json_template = None
            requests_to_solve = dict()
            plans = self.Plan.query.all()
            found_translated_template = False
            for p in plans:
                if p.status == self.Plan.TRANSLATED:
                    json_template = p.translation
                    found_translated_template = True
                    break
            if found_translated_template and not json_template:
                message = _LE("Plan {} status is translated, yet "
                              "the translation wasn't found").format(p.id)
                LOG.error(message)
                p.status = self.Plan.ERROR
                p.message = message
                p.update()
                continue
            elif not json_template:
                # Nothing to solve right now; poll again.
                continue

            p.status = self.Plan.SOLVING
            p.update()

            request = parser.Parser()
            request.cei = self.cei
            try:
                request.parse_template(json_template)
            except Exception as err:
                # Use str(err), not err.message: the "message" attribute
                # is gone in Python 3 and absent on many exception types,
                # which would mask the real parse error with an
                # AttributeError here.
                message = _LE("Plan {} status encountered a "
                              "parsing error: {}").format(p.id, str(err))
                LOG.error(message)
                p.status = self.Plan.ERROR
                p.message = message
                p.update()
                continue

            request.map_constraints_to_demands()
            requests_to_solve[p.id] = request
            opt = optimizer.Optimizer(self.conf, _requests=requests_to_solve)
            solution = opt.get_solution()

            recommendations = []
            if not solution or not solution.decisions:
                message = _LI("Plan {} search failed, no "
                              "recommendations found").format(p.id)
                LOG.info(message)
                # Update the plan status
                p.status = self.Plan.NOT_FOUND
                p.message = message
                p.update()
            else:
                # Assemble recommendation result JSON
                for demand_name in solution.decisions:
                    resource = solution.decisions[demand_name]

                    rec = {
                        # FIXME(shankar) A&AI must not be hardcoded here.
                        # Also, account for more than one Inventory Provider.
                        "inventory_provider": "aai",
                        "service_resource_id":
                            resource.get("service_resource_id"),
                        "candidate": {
                            "candidate_id": resource.get("candidate_id"),
                            "inventory_type": resource.get("inventory_type"),
                            "cloud_owner": resource.get("cloud_owner"),
                            "location_type": resource.get("location_type"),
                            "location_id": resource.get("location_id")},
                        "attributes": {
                            "physical-location-id":
                                resource.get("physical_location_id"),
                            "sriov_automation":
                                resource.get("sriov_automation"),
                            "cloud_owner": resource.get("cloud_owner"),
                            'cloud_version': resource.get("cloud_region_version")},
                    }
                    # Service candidates also carry their host id.
                    if rec["candidate"]["inventory_type"] == "service":
                        rec["attributes"]["host_id"] = resource.get("host_id")
                        rec["candidate"]["host_id"] = resource.get("host_id")

                    # TODO(snarayanan): Add total value to recommendations?
                    # msg = "--- total value of decision = {}"
                    # LOG.debug(msg.format(_best_path.total_value))
                    # msg = "--- total cost of decision = {}"
                    # LOG.debug(msg.format(_best_path.total_cost))

                    recommendations.append({demand_name: rec})

                # Update the plan with the solution
                p.solution = {
                    "recommendations": recommendations
                }
                p.status = self.Plan.SOLVED
                p.update()
            LOG.info(_LI("Plan {} search complete, solution with {} "
                         "recommendations found").
                     format(p.id, len(recommendations)))
            LOG.debug("Plan {} detailed solution: {}".
                      format(p.id, p.solution))

            # Check status, update plan with response, SOLVED or ERROR

    def terminate(self):
        """Terminate: stop the control loop and shut down gracefully."""
        LOG.debug("%s" % self.__class__.__name__)
        self.running = False
        self._gracefully_stop()
        super(SolverService, self).terminate()

    def reload(self):
        """Reload"""
        LOG.debug("%s" % self.__class__.__name__)
        self._restart()
diff --git a/conductor/conductor/solver/simulators/__init__.py b/conductor/conductor/solver/simulators/__init__.py
new file mode 100644 (file)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/simulators/a_and_ai/__init__.py b/conductor/conductor/solver/simulators/a_and_ai/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/simulators/valet/__init__.py b/conductor/conductor/solver/simulators/valet/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/utils/__init__.py b/conductor/conductor/solver/utils/__init__.py
new file mode 100755 (executable)
index 0000000..f2bbdfd
--- /dev/null
@@ -0,0 +1,19 @@
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/conductor/conductor/solver/utils/constraint_engine_interface.py b/conductor/conductor/solver/utils/constraint_engine_interface.py
new file mode 100644 (file)
index 0000000..de335d6
--- /dev/null
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+"""Constraint/Engine Interface
+
+Utility library that defines the interface between
+the constraints and the conductor data engine.
+
+"""
+
+from oslo_log import log
+
+LOG = log.getLogger(__name__)
+
+
class ConstraintEngineInterface(object):
    """Synchronous RPC facade between solver constraints and the data engine."""

    def __init__(self, client):
        # music_messaging RPC client; every method below uses client.call().
        self.client = client

    def get_candidate_location(self, candidate):
        """Return (latitude, longitude) floats for a candidate.

        Uses the candidate's own lat/lon when both are present; otherwise
        asks the data service.
        """
        # Try calling a method (remember, "calls" are synchronous)
        # FIXME(jdandrea): Doing this because Music calls are expensive.
        lat = candidate.get('latitude')
        lon = candidate.get('longitude')
        if lat and lon:
            response = (float(lat), float(lon))
        else:
            ctxt = {}
            args = {"candidate": candidate}
            response = self.client.call(ctxt=ctxt,
                                        method="get_candidate_location",
                                        args=args)
            LOG.debug("get_candidate_location response: {}".format(response))
        return response

    def get_candidate_zone(self, candidate, _category=None):
        """Return the candidate's zone id for the given category."""
        # FIXME(jdandrea): Doing this because Music calls are expensive.
        if _category == 'region':
            response = candidate['location_id']
        elif _category == 'complex':
            response = candidate['complex_name']
        else:
            ctxt = {}
            args = {"candidate": candidate, "category": _category}
            response = self.client.call(ctxt=ctxt,
                                        method="get_candidate_zone",
                                        args=args)
            LOG.debug("get_candidate_zone response: {}".format(response))
        return response

    def get_candidates_from_service(self, constraint_name,
                                    constraint_type, candidate_list,
                                    controller, inventory_type,
                                    request, cost, demand_name):
        """Filter candidates through a service controller via the data engine."""
        ctxt = {}
        args = {"constraint_name": constraint_name,
                "constraint_type": constraint_type,
                "candidate_list": candidate_list,
                "controller": controller,
                "inventory_type": inventory_type,
                "request": request,
                "cost": cost,
                "demand_name": demand_name}
        response = self.client.call(ctxt=ctxt,
                                    method="get_candidates_from_service",
                                    args=args)
        LOG.debug("get_candidates_from_service response: {}".format(response))
        # response is a list of (candidate, cost) tuples
        return response

    def get_inventory_group_candidates(self, candidate_list,
                                       demand_name, resolved_candidate):
        # return a list of the "pair" candidates for the given candidate
        ctxt = {}
        args = {"candidate_list": candidate_list,
                "demand_name": demand_name,
                "resolved_candidate": resolved_candidate}
        response = self.client.call(ctxt=ctxt,
                                    method="get_inventory_group_candidates",
                                    args=args)
        # The original backslash-continued string embedded a run of
        # indentation spaces inside the log message; implicit string
        # concatenation yields the intended single-space message.
        LOG.debug("get_inventory_group_candidates "
                  "response: {}".format(response))
        return response

    def get_candidates_by_attributes(self, demand_name,
                                     candidate_list, properties):
        """Filter candidates by required attribute properties."""
        ctxt = {}
        args = {"candidate_list": candidate_list,
                "properties": properties,
                "demand_name": demand_name}
        response = self.client.call(ctxt=ctxt,
                                    method="get_candidates_by_attributes",
                                    args=args)
        # Log label fixed to match the method name (was "..._attribute").
        LOG.debug("get_candidates_by_attributes response: {}".format(response))
        # response is a list of (candidate, cost) tuples
        return response
diff --git a/conductor/conductor/solver/utils/utils.py b/conductor/conductor/solver/utils/utils.py
new file mode 100755 (executable)
index 0000000..5cec51f
--- /dev/null
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# -------------------------------------------------------------------------
+#   Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+import math
+
+
def compute_air_distance(_src, _dst):
    """Compute the air (great-circle) distance between two points.

    Uses the haversine formula on a spherical Earth.
    input: a pair of (lat, lon)s in degrees
    output: air distance as km
    """
    if _src == _dst:
        return 0.0

    earth_radius_km = 6371.0

    delta_lat = math.radians(_dst[0] - _src[0])
    delta_lon = math.radians(_dst[1] - _src[1])
    sin_half_lat = math.sin(delta_lat / 2.0)
    sin_half_lon = math.sin(delta_lon / 2.0)
    haversine = sin_half_lat * sin_half_lat + \
        math.cos(math.radians(_src[0])) * \
        math.cos(math.radians(_dst[0])) * \
        sin_half_lon * sin_half_lon
    arc = 2.0 * math.atan2(math.sqrt(haversine),
                           math.sqrt(1.0 - haversine))

    return earth_radius_km * arc
+
+
def convert_km_to_miles(_km):
    """Convert a distance in kilometers to statute miles."""
    miles_per_km = 0.621371
    return _km * miles_per_km
+
+
def convert_miles_to_km(_miles):
    """Convert a distance in statute miles to kilometers."""
    miles_per_km = 0.621371
    return _miles / miles_per_km