Fix osdf code after upgrading to py38 83/112783/4
author: vrvarma <vikas.varma@att.com>
Thu, 17 Sep 2020 06:24:00 +0000 (02:24 -0400)
committer: vrvarma <vikas.varma@att.com>
Thu, 17 Sep 2020 08:52:05 +0000 (04:52 -0400)
Fix osdf logging to work with 3.8
Fix osdf code that broke after migration
Fix test cases after migration
Fixing pep8 violations

Change-Id: I11ca33959882c8b9010f00ff744d59c7eeb3c2f2
Signed-off-by: vrvarma <vikas.varma@att.com>
Issue-ID: OPTFRA-796

15 files changed:
config/log.yml
osdf/apps/baseapp.py
osdf/logging/__init__.py
osdf/logging/monkey.py [new file with mode: 0644]
osdf/logging/oof_mdc_context.py [new file with mode: 0644]
osdf/logging/oof_mdc_formatter.py [new file with mode: 0644]
osdf/logging/osdf_logging.py
osdf/utils/data_conversion.py
osdf/utils/mdc_utils.py
runtime/model_api.py
runtime/optim_engine.py
runtime/solvers/mzn/mzn_solver.py
test/config/log.yml
test/logging/test_osdf_logging.py
test/mainapp/test_osdfapp.py

index 0b8815f..ad0de21 100644 (file)
@@ -3,19 +3,19 @@ disable_existing_loggers: True
 
 loggers:
   error:
-    handlers: [error_handler]
+    handlers: [error_handler, console_handler]
     level: "WARN"
     propagate: True
   debug:
-    handlers: [debug_handler]
+    handlers: [debug_handler, console_handler]
     level: "DEBUG"
     propagate: True
   metrics:
-    handlers: [metrics_handler]
+    handlers: [metrics_handler, console_handler]
     level: "INFO"
     propagate: True
   audit:
-    handlers: [audit_handler]
+    handlers: [audit_handler, console_handler]
     level: "INFO"
     propagate: True
 handlers:
@@ -59,37 +59,42 @@ handlers:
     utc: True
     delay: False
     backupCount: 10
+  console_handler:
+    level: "DEBUG"
+    class: "logging.StreamHandler"
+    formatter: "metricsFormat"
+
 formatters:
   standard:
     format: "%(asctime)s|||||%(name)s||%(thread)||%(funcName)s||%(levelname)s||%(message)s"
   debugFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {threadID} {serverName} {serviceName} {instanceUUID} {upperLogLevel} {severity} {serverIPAddress} {server} {IPAddress} {className} {timer} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "%(asctime)s.%(msecs)03d+00:00|{requestID}|%(threadName)s|{server}|%(levelname)s|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   errorFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {threadID} {serviceName} {partnerName} {targetEntity} {targetServiceName} {errorCode} {errorDescription} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "%(asctime)s.%(msecs)03d+00:00|{requestID}|%(threadName)s|{serviceName}|{partnerName}\
+    |{targetEntity}|{targetServiceName}|%(levelname)s|{errorCode}|{errorDescription}|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   auditFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {serviceInstanceID} {threadID} {serverName} {serviceName} {partnerName} {statusCode} {responseCode} {responseDescription} {instanceUUID} {upperLogLevel} {severity} \
-             {serverIPAddress} {timer} {server} {IPAddress} {className} {unused} {processKey} {customField1} {customField2} {customField3} {customField4} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "{entryTimestamp}+00:00|%(asctime)s.%(msecs)03d+00:00|{requestID}|{serviceInstanceID}\
+        |%(threadName)s|{server}|{serviceName}|{partnerName}|{statusCode}|{responseCode}|{responseDescription}\
+        |{instanceUUID}|%(levelname)s|{severity}|{serverIPAddress}|{timer}|{server}|{IPAddress}||{unused}\
+        |{processKey}|{customField1}|{customField2}|{customField3}|{customField4}|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   metricsFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {serviceInstanceID} {threadID} {serverName} {serviceName} {partnerName} \
-             {targetEntity} {targetServiceName} {statusCode} {responseCode} {responseDescription} \
-             {instanceUUID} {upperLogLevel} {severity} {serverIPAddress} {timer} {server} {IPAddress} \
-             {className} {unused} {processKey} {targetVirtualEntity} {customField1} {customField2} \
-             {customField3} {customField4} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
-
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "{entryTimestamp}+00:00|%(asctime)s.%(msecs)03d+00:00|{requestID}|{serviceInstanceID}\
+        |%(threadName)s|{server}|{serviceName}|{partnerName}|{targetEntity}|{targetServiceName}|{statusCode}|{responseCode}|{responseDescription}\
+        |{instanceUUID}|%(levelname)s|{severity}|{serverIPAddress}|{timer}|{server}|{IPAddress}||{unused}\
+        |{processKey}|{TargetVirtualEntity}|{customField1}|{customField2}|{customField3}|{customField4}|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   mdcFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {invocationID} {serviceName} {serviceIP}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(asctime)s.%(msecs)03d+00:00|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {invocationID} {serviceName} {serverIPAddress}"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
index fa301d8..c4fc82d 100644 (file)
@@ -21,24 +21,35 @@ OSDF Manager Main Flask Application
 """
 
 import json
+from optparse import OptionParser
 import ssl
 import sys
 import time
 import traceback
-from optparse import OptionParser
 
+from flask import Flask
+from flask import g
+from flask import request
+from flask import Response
+from onaplogging.mdcContext import MDC
 import pydevd
-from flask import Flask, request, Response, g
 from requests import RequestException
 from schematics.exceptions import DataError
 
 import osdf.adapters.aaf.sms as sms
-import osdf.operation.responses
 from osdf.config.base import osdf_config
-from osdf.logging.osdf_logging import error_log, debug_log
-from osdf.operation.error_handling import request_exception_to_json_body, internal_error_message
+from osdf.logging.osdf_logging import audit_log
+from osdf.logging.osdf_logging import debug_log
+from osdf.logging.osdf_logging import error_log
+from osdf.operation.error_handling import internal_error_message
+from osdf.operation.error_handling import request_exception_to_json_body
 from osdf.operation.exceptions import BusinessException
-from osdf.utils.mdc_utils import clear_mdc, mdc_from_json, default_mdc, get_request_id
+import osdf.operation.responses
+from osdf.utils.mdc_utils import clear_mdc
+from osdf.utils.mdc_utils import get_request_id
+from osdf.utils.mdc_utils import populate_default_mdc
+from osdf.utils.mdc_utils import populate_mdc
+from osdf.utils.mdc_utils import set_error_details
 
 ERROR_TEMPLATE = osdf.ERROR_TEMPLATE
 
@@ -49,8 +60,11 @@ BAD_CLIENT_REQUEST_MESSAGE = 'Client sent an invalid request'
 
 @app.errorhandler(BusinessException)
 def handle_business_exception(e):
-    """An exception explicitly raised due to some business rule"""
-    error_log.error("Synchronous error for request id {} {}".format(g.request_id, traceback.format_exc()))
+    """An exception explicitly raised due to some business rule
+
+    """
+    error_log.error("Synchronous error for request id {} {}"
+                    .format(g.request_id, traceback.format_exc()))
     err_msg = ERROR_TEMPLATE.render(description=str(e))
     response = Response(err_msg, content_type='application/json; charset=utf-8')
     response.status_code = 400
@@ -59,8 +73,9 @@ def handle_business_exception(e):
 
 @app.errorhandler(RequestException)
 def handle_request_exception(e):
-    """Returns a detailed synchronous message to the calling client
-    when osdf fails due to a remote call to another system"""
+    """Returns a detailed synchronous message to the calling client when osdf fails due to a remote call to another system
+
+    """
     error_log.error("Synchronous error for request id {} {}".format(g.request_id, traceback.format_exc()))
     err_msg = request_exception_to_json_body(e)
     response = Response(err_msg, content_type='application/json; charset=utf-8')
@@ -70,7 +85,9 @@ def handle_request_exception(e):
 
 @app.errorhandler(DataError)
 def handle_data_error(e):
-    """Returns a detailed message to the calling client when the initial synchronous message is invalid"""
+    """Returns a detailed message to the calling client when the initial synchronous message is invalid
+
+    """
     error_log.error("Synchronous error for request id {} {}".format(g.request_id, traceback.format_exc()))
 
     body_dictionary = {
@@ -89,29 +106,65 @@ def handle_data_error(e):
 
 @app.before_request
 def log_request():
-    g.request_start = time.process_time()
-    if request.data:
-        if request.get_json():
-            request_json = request.get_json()
-            g.request_id = get_request_id(request_json)
-            mdc_from_json(request_json)
-        else:
-            g.request_id = "N/A"
-            default_mdc()
+    clear_mdc()
+    if request.content_type and 'json' in request.content_type:
+        populate_mdc(request)
+        g.request_id = get_request_id(request.get_json())
+        log_message(json.dumps(request.get_json()), "INPROGRESS", 'ENTRY')
     else:
-        g.request_id = "N/A"
-        default_mdc()
+        populate_default_mdc(request)
+        log_message('', "INPROGRESS", 'ENTRY')
 
 
 @app.after_request
 def log_response(response):
-    clear_mdc()
+    log_response_data(response)
     return response
 
 
+def log_response_data(response):
+    """Audit-log the outgoing response body with status and timing MDC fields.
+
+    Primary path: map the HTTP status to COMPLETE/ERROR and audit-log the
+    body via log_message. If that fails (e.g. MDC state is broken), fall back
+    to a minimal default audit MDC; if even that fails, record an error.
+    """
+    status_value = ''
+    try:
+        status_value = map_status_value(response)
+        log_message(response.get_data(as_text=True), status_value, 'EXIT')
+    except Exception:
+        try:
+            # fallback: rebuild a minimal MDC and still emit the audit record
+            set_default_audit_mdc(request, status_value, 'EXIT')
+            audit_log.info(response.get_data(as_text=True))
+        except Exception:
+            set_error_details(300, 'Internal Error')
+            error_log.error("Error logging the response data due to {}".format(traceback.format_exc()))
+
+
+def set_default_audit_mdc(request, status_value, p_marker):
+    """Populate a minimal set of MDC fields for audit logging.
+
+    Used as a fallback when the normal MDC population failed.
+
+    :param request: flask request object (request.path becomes serviceName)
+    :param status_value: status string, e.g. COMPLETE/ERROR (may be '')
+    :param p_marker: log marker, e.g. 'EXIT'
+    """
+    MDC.put('partnerName', 'internal')
+    MDC.put('serviceName', request.path)
+    MDC.put('statusCode', status_value)
+    MDC.put('requestID', 'internal')
+    MDC.put('timer', int((time.process_time() - g.request_start) * 1000))
+    MDC.put('customField1', p_marker)
+
+
+def log_message(message, status_value, p_marker='INVOKE'):
+    """Audit-log a message with status, marker and elapsed-time MDC fields.
+
+    :param message: message body to audit-log
+    :param status_value: status string, e.g. INPROGRESS/COMPLETE/ERROR
+    :param p_marker: log marker, e.g. ENTRY/EXIT (default INVOKE)
+
+    NOTE(review): g.request_start is read here but this patch removed the
+    assignment from log_request -- presumably populate_mdc/populate_default_mdc
+    now set it; verify, otherwise this raises AttributeError.
+    NOTE(review): time.process_time() is CPU time, not wall-clock, so the
+    'timer' field will under-report time spent waiting on I/O -- confirm intent.
+    """
+    MDC.put('statusCode', status_value)
+    MDC.put('customField1', p_marker)
+    MDC.put('timer', int((time.process_time() - g.request_start) * 1000))
+    audit_log.info(message)
+
+
+def map_status_value(response):
+    """Map an HTTP response to an audit status string.
+
+    :param response: flask response object
+    :return: "COMPLETE" for 2xx status codes, "ERROR" for everything else
+    """
+    if 200 <= response.status_code < 300:
+        status_value = "COMPLETE"
+    else:
+        status_value = "ERROR"
+    return status_value
+
+
 @app.errorhandler(500)
 def internal_failure(error):
-    """Returned when unexpected coding errors occur during initial synchronous processing"""
+    """Returned when unexpected coding errors occur during initial synchronous processing
+
+    """
     error_log.error("Synchronous error for request id {} {}".format(g.request_id, traceback.format_exc()))
     response = Response(internal_error_message, content_type='application/json; charset=utf-8')
     response.status_code = 500
@@ -127,8 +180,10 @@ def get_options(argv):
     parser.add_option("-l", "--local", dest="local", help="run locally", action="store_true", default=False)
     parser.add_option("-t", "--devtest", dest="devtest", help="run in dev/test environment", action="store_true",
                       default=False)
-    parser.add_option("-d", "--debughost", dest="debughost", help="IP Address of host running debug server", default='')
-    parser.add_option("-p", "--debugport", dest="debugport", help="Port number of debug server", type=int, default=5678)
+    parser.add_option("-d", "--debughost", dest="debughost",
+                      help="IP Address of host running debug server", default='')
+    parser.add_option("-p", "--debugport", dest="debugport",
+                      help="Port number of debug server", type=int, default=5678)
     opts, args = parser.parse_args(argv)
 
     if opts.debughost:
index 4b25e5b..df7613d 100644 (file)
@@ -15,3 +15,7 @@
 #
 # -------------------------------------------------------------------------
 #
+
+import yaml
+
+yaml.warnings({'YAMLLoadWarning': False})
diff --git a/osdf/logging/monkey.py b/osdf/logging/monkey.py
new file mode 100644 (file)
index 0000000..f6041bc
--- /dev/null
@@ -0,0 +1,35 @@
+# -------------------------------------------------------------------------
+#   Copyright (c) 2020 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+
+__all__ = ["patch_all"]
+
+from onaplogging.logWatchDog import patch_loggingYaml
+
+from osdf.logging.oof_mdc_context import patch_logging_mdc
+
+
+def patch_all(mdc=True, yaml=True):
+    """monkey patch osdf logging to enable mdc
+
+    :param mdc: when True, replace logging.Logger methods with the MDC-aware
+        versions from osdf.logging.oof_mdc_context
+    :param yaml: when True, apply onaplogging's yaml log-config watchdog patch
+    """
+    if mdc is True:
+        patch_logging_mdc()
+
+    if yaml is True:
+        patch_loggingYaml()
diff --git a/osdf/logging/oof_mdc_context.py b/osdf/logging/oof_mdc_context.py
new file mode 100644 (file)
index 0000000..9c9b52c
--- /dev/null
@@ -0,0 +1,170 @@
+# -------------------------------------------------------------------------
+#   Copyright (c) 2020 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import logging
+import re
+import sys
+
+from onaplogging.marker import Marker
+from onaplogging.marker import MARKER_TAG
+from onaplogging.mdcContext import _replace_func_name
+from onaplogging.mdcContext import fetchkeys
+from onaplogging.mdcContext import findCaller as fc
+from onaplogging.mdcContext import MDC
+
+from osdf.utils.mdc_utils import set_error_details
+
+
+def findCaller(self, stack_info=False, stacklevel=1):
+    """replacing onaplogging.mdcContext with this method to work with py3.8
+
+    Python 3.8 added the ``stacklevel`` parameter to Logger.findCaller; the
+    onaplogging implementation (fc) only accepts ``stack_info``, so this
+    adapter drops ``stacklevel`` before delegating.
+    """
+    return fc(stack_info)
+
+
+def mdc_mapper():
+    """Convert the MDC dict into comma separated, name=value string
+
+    customField2 is excluded because it is the field this mapped string is
+    stored into (see info()/error()); including it would nest the previous
+    snapshot inside the new one.
+
+    :return: string format
+    """
+    return ','.join(f'{k}={v}' for (k, v) in MDC.result().items() if k not in ['customField2'])
+
+
+@fetchkeys
+def info(self, msg, *args, **kwargs):
+    """Wrapper method for log.info is called
+
+    msg: log message
+    args: logging args
+    kwargs: all the optional args
+    """
+    if self.isEnabledFor(logging.INFO):
+        # snapshot the full MDC into customField2 so it lands in the record
+        MDC.put('customField2', mdc_mapper())
+        self._log(logging.INFO, no_sep(msg), args, **kwargs)
+
+
+@fetchkeys
+def debug(self, msg, *args, **kwargs):
+    """Wrapper method for log.debug is called
+
+    Unlike info()/error(), no MDC snapshot is taken for debug records.
+
+    msg: log message
+    args: logging args
+    kwargs: all the optional args
+    """
+    if self.isEnabledFor(logging.DEBUG):
+        self._log(logging.DEBUG, no_sep(msg), args, **kwargs)
+
+
+@fetchkeys
+def warning(self, msg, *args, **kwargs):
+    """Wrapper method for log.warning is called
+
+    msg: log message (| and newlines are stripped via no_sep)
+    args: logging args
+    kwargs: all the optional args
+    """
+    if self.isEnabledFor(logging.WARNING):
+        self._log(logging.WARNING, no_sep(msg), args, **kwargs)
+
+
+@fetchkeys
+def exception(self, msg, *args, **kwargs):
+    """Wrapper method for log.exception is called
+
+    Delegates to self.error with exc_info enabled so the traceback is logged.
+
+    msg: log message
+    args: logging args
+    kwargs: all the optional args
+    """
+    kwargs['exc_info'] = 1
+    self.error(no_sep(msg), *args, **kwargs)
+
+
+@fetchkeys
+def critical(self, msg, *args, **kwargs):
+    """Wrapper method for log.critical
+
+    msg: log message
+    args: logging args
+    kwargs: all the optional args
+    """
+    if self.isEnabledFor(logging.CRITICAL):
+        self._log(logging.CRITICAL, no_sep(msg), args, **kwargs)
+
+
+@fetchkeys
+def error(self, msg, *args, **kwargs):
+    """Wrapper method for log.error is called
+
+    msg: log message
+    args: logging args
+    kwargs: all the optional args
+    """
+    if self.isEnabledFor(logging.ERROR):
+        # guarantee an errorCode is present on every error record
+        if not MDC.get('errorCode'):
+            set_error_details(400, 'Internal Error')
+        # snapshot the full MDC into customField2 so it lands in the record
+        MDC.put('customField2', mdc_mapper())
+        self._log(logging.ERROR, no_sep(msg), args, **kwargs)
+
+
+@fetchkeys
+def log(self, level, msg, *args, **kwargs):
+    """Wrapper method for log.log is called
+
+    level: numeric logging level; must be an int (mirrors stdlib Logger.log,
+        which raises TypeError when logging.raiseExceptions is set)
+    msg: log message
+    args: logging args
+    kwargs: all the optional args
+    """
+    if not isinstance(level, int):
+        if logging.raiseExceptions:
+            raise TypeError("level must be an integer")
+        else:
+            return
+
+    if self.isEnabledFor(level):
+        self._log(level, no_sep(msg), args, **kwargs)
+
+
+def handle(self, record):
+    """Wrapper method for log.handle is called
+
+    Copies a Marker attached to the logger onto the record before dispatch,
+    then follows the stdlib Logger.handle disabled/filter/callHandlers flow.
+    """
+    c_marker = getattr(self, MARKER_TAG, None)
+
+    if isinstance(c_marker, Marker):
+        setattr(record, MARKER_TAG, c_marker)
+
+    if (not self.disabled) and self.filter(record):
+        self.callHandlers(record)
+
+
+def no_sep(message):
+    """This method will remove newline, | from the message
+
+    '|' is the field separator in the ONAP log formats (see config/log.yml),
+    so it must not appear inside a message; newlines would break the
+    one-record-per-line log layout. None is rendered as ''.
+    """
+    if message is None:
+        return ''
+    return re.sub(r'[\|\n]', ' ', str(message))
+
+
+def patch_logging_mdc():
+    """The patch to add MDC ability in logging Record instance at runtime
+
+    Replaces every logging.Logger method listed in onaplogging's
+    _replace_func_name with the MDC-aware version defined in this module.
+    """
+    local_module = sys.modules[__name__]
+    for attr in dir(logging.Logger):
+        if attr in _replace_func_name:
+            new_func = getattr(local_module, attr, None)
+            if new_func:
+                setattr(logging.Logger, attr, new_func)
diff --git a/osdf/logging/oof_mdc_formatter.py b/osdf/logging/oof_mdc_formatter.py
new file mode 100644 (file)
index 0000000..6272a18
--- /dev/null
@@ -0,0 +1,51 @@
+# -------------------------------------------------------------------------
+#   Copyright (c) 2020 AT&T Intellectual Property
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import time
+
+from onaplogging.colorFormatter import RESET
+from onaplogging.mdcformatter import MDCFormatter
+
+
+class OOFMDCFormatter(MDCFormatter):
+    """ONAP MDC formatter
+
+    Formats timestamps in UTC and strips the color reset escape emitted by
+    the onaplogging base formatter.
+    """
+
+    def __init__(self, fmt=None, mdcfmt=None,
+                 datefmt=None, colorfmt=None, style="%"):
+        super().__init__(fmt, mdcfmt, datefmt, colorfmt, style)
+        # log timestamps in UTC (gmtime) rather than local time
+        self.converter = time.gmtime
+
+    def _replaceStr(self, keys):
+        """overriding the default behavior
+
+        keys: keys that needs to be substituted
+
+        NOTE(review): replace(i, i) is a no-op, so this returns self._mdcFmt
+        unchanged -- the override exists to disable the base class's
+        placeholder rewriting; confirm that is the intent.
+        """
+        fmt = self._mdcFmt
+        for i in keys:
+            fmt = fmt.replace(i, i)
+
+        return fmt
+
+    def format(self, record):
+        """Removing the color format end of line character.
+
+        """
+        return super(OOFMDCFormatter, self).format(record).replace(RESET, '')
index a5a9739..7ebaa99 100755 (executable)
 #
 # -------------------------------------------------------------------------
 #
-
 import logging
+from logging import config
 import os
 import traceback
-from logging import config
 
 import yaml
-from onaplogging import monkey
 
+from osdf.logging import monkey
 from osdf.utils.programming_utils import MetaSingleton
 
 BASE_DIR = os.path.dirname(__file__)
@@ -32,6 +31,7 @@ LOGGING_FILE = os.path.join(BASE_DIR, '..', '..', 'config', 'log.yml')
 
 def format_exception(err, prefix=None):
     """Format operation for use with ecomp logging
+
     :param err: exception object
     :param prefix: prefix string message
     :return: formatted exception (via repr(traceback.format_tb(err.__traceback__))
@@ -50,9 +50,11 @@ def create_log_dirs():
             os.makedirs(os.path.dirname(a['filename']), exist_ok=True)
 
 
-class OOF_OSDFLogMessageHelper(metaclass=MetaSingleton):
+class OOFOSDFLogMessageHelper(metaclass=MetaSingleton):
     """Provides loggers as a singleton (otherwise, we end up with duplicate messages).
+
     Provides error_log, metric_log, audit_log, and debug_log (in that order)
+
     Additionally can provide specific log handlers too
     """
     log_handlers = None
@@ -60,13 +62,13 @@ class OOF_OSDFLogMessageHelper(metaclass=MetaSingleton):
 
     def get_handlers(self, levels=None):
         """Return ONAP-compliant log handlers for different levels. Each "level" ends up in a different log file
+
         with a prefix of that level.
 
         For example: error_log, metrics_log, audit_log, debug_log in that order
+
         :param levels: None or list of levels subset of self.default_levels (["error", "metrics", "audit", "debug"])
-        :param log_version: Currently only pre_onap is supported
-        :param config_file: Logging configuration file for ONAP compliant logging
-        :param service_name: Name of the service
+
         :return: list of log_handlers in the order of levels requested.
               if levels is None: we return handlers for self.default_levels
               if levels is ["error", "audit"], we return log handlers for that.
@@ -78,154 +80,240 @@ class OOF_OSDFLogMessageHelper(metaclass=MetaSingleton):
         return [logging.getLogger(x) for x in wanted_levels]
 
 
-class OOF_OSDFLogMessageFormatter(object):
+class OOFOSDFLogMessageFormatter(object):
 
     @staticmethod
     def accepted_valid_request(req_id, request):
+        """valid request message formatter
+
+        """
         return "Accepted valid request for ID: {} for endpoint: {}".format(
             req_id, request.url)
 
     @staticmethod
     def invalid_request(req_id, err):
+        """invalid request message formatter
+
+        """
         return "Invalid request for request ID: {}; cause: {}".format(
             req_id, format_exception(err))
 
     @staticmethod
     def invalid_response(req_id, err):
+        """invalid response message formatter
+
+        """
         return "Invalid response for request ID: {}; cause: {}".format(
             req_id, format_exception(err))
 
     @staticmethod
     def malformed_request(request, err):
+        """Malformed request message formatter
+
+        """
         return "Malformed request for URL {}, from {}; cause: {}".format(
             request.url, request.remote_address, format_exception(err))
 
     @staticmethod
     def malformed_response(response, client, err):
+        """Malformed response message formatter
+
+        """
         return "Malformed response {} for client {}; cause: {}".format(
             response, client, format_exception(err))
 
     @staticmethod
     def need_policies(req_id):
+        """Policies needed message formatter
+
+        """
         return "Policies required but found no policies for request ID: {}".format(req_id)
 
     @staticmethod
     def policy_service_error(url, req_id, err):
+        """policy service error message formatter
+
+        """
         return "Unable to call policy for {} for request ID: {}; cause: {}".format(
             url, req_id, format_exception(err))
 
     @staticmethod
     def requesting_url(url, req_id):
+        """Message formatter for requesting url
+
+        """
         return "Making a call to URL {} for request ID: {}".format(url, req_id)
 
     @staticmethod
     def requesting(service_name, req_id):
+        """Message formatter for requesting a service
+
+        """
         return "Making a call to service {} for request ID: {}".format(service_name, req_id)
 
     @staticmethod
     def error_requesting(service_name, req_id, err):
+        """Message formatter on error requesting a service
+
+        """
         return "Error while requesting service {} for request ID: {}; cause: {}".format(
             service_name, req_id, format_exception(err))
 
     @staticmethod
     def calling_back(req_id, callback_url):
+        """Message formatter when a callback url is invoked
+
+        """
         return "Posting result to callback URL for request ID: {}; callback URL={}".format(
             req_id, callback_url)
 
     @staticmethod
     def calling_back_with_body(req_id, callback_url, body):
+        """Message formatter when a callback url with body is invoked
+
+        """
         return "Posting result to callback URL for request ID: {}; callback URL={} body={}".format(
             req_id, callback_url, body)
 
     @staticmethod
     def error_calling_back(req_id, callback_url, err):
+        """Message formatter on an error while posting result to callback url
+
+        """
         return "Error while posting result to callback URL {} for request ID: {}; cause: {}".format(
             req_id, callback_url, format_exception(err))
 
     @staticmethod
     def received_request(url, remote_addr, json_body):
+        """Message when a call is received
+
+        """
         return "Received a call to {} from {} {}".format(url, remote_addr, json_body)
 
     @staticmethod
     def new_worker_thread(req_id, extra_msg=""):
+        """Message on invoking a new worker thread
+
+        """
         res = "Initiating new worker thread for request ID: {}".format(req_id)
         return res + extra_msg
 
     @staticmethod
     def inside_worker_thread(req_id, extra_msg=""):
+        """Message when inside a worker thread
+
+        """
         res = "Inside worker thread for request ID: {}".format(req_id)
         return res + extra_msg
 
     @staticmethod
     def processing(req_id, desc):
+        """Processing a request
+
+        """
         return "Processing request ID: {} -- {}".format(req_id, desc)
 
     @staticmethod
     def processed(req_id, desc):
+        """Processed the request
+
+        """
         return "Processed request ID: {} -- {}".format(req_id, desc)
 
     @staticmethod
     def error_while_processing(req_id, desc, err):
+        """Error while processing the request
+
+        """
         return "Error while processing request ID: {} -- {}; cause: {}".format(
             req_id, desc, format_exception(err))
 
     @staticmethod
     def creating_local_env(req_id):
+        """Creating a local environment
+
+        """
         return "Creating local environment request ID: {}".format(
             req_id)
 
     @staticmethod
     def error_local_env(req_id, desc, err):
+        """Error creating a local env
+
+        """
         return "Error while creating local environment for request ID: {} -- {}; cause: {}".format(
             req_id, desc, err.__traceback__)
 
     @staticmethod
     def inside_new_thread(req_id, extra_msg=""):
+        """Inside a new thread
+
+        """
         res = "Spinning up a new thread for request ID: {}".format(req_id)
         return res + " " + extra_msg
 
     @staticmethod
     def error_response_posting(req_id, desc, err):
+        """Error while posting a response
+
+        """
         return "Error while posting a response for a request ID: {} -- {}; cause: {}".format(
             req_id, desc, err.__traceback__)
 
     @staticmethod
     def received_http_response(resp):
-        """ """
+        """Received a http response
+
+        """
         return "Received response [code: {}, headers: {}, data: {}]".format(
             resp.status_code, resp.headers, resp.__dict__)
 
     @staticmethod
     def sending_response(req_id, desc):
+        """sending a response
+
+        """
         return "Response is sent for request ID: {} -- {}".format(
             req_id, desc)
 
     @staticmethod
     def listening_response(req_id, desc):
+        """Resposne is sent for a request ID
+
+        """
         return "Response is sent for request ID: {} -- {}".format(
             req_id, desc)
 
     @staticmethod
     def items_received(item_num, item_type, desc="Received"):
+        """Items received
+
+        """
         return "{} {} {}".format(desc, item_num, item_type)
 
     @staticmethod
     def items_sent(item_num, item_type, desc="Published"):
+        """items published
+
+        """
         return "{} {} {}".format(desc, item_num, item_type)
 
 
-MH = OOF_OSDFLogMessageFormatter
+MH = OOFOSDFLogMessageFormatter
 
-error_log, metrics_log, audit_log, debug_log = OOF_OSDFLogMessageHelper().get_handlers()
+error_log, metrics_log, audit_log, debug_log = OOFOSDFLogMessageHelper().get_handlers()
 
 
 def warn_audit_error(msg):
-    """Log the message to error_log.warn and audit_log.warn"""
+    """Log the message to error_log.warn and audit_log.warn
+
+    """
     log_message_multi(msg, audit_log.warn, error_log.warn)
 
 
 def log_message_multi(msg, *logger_methods):
     """Log the msg to multiple loggers
+
     :param msg: message to log
     :param logger_methods: e.g. error_log.warn, audit_log.warn, etc.
     """
index 2f678fa..08af3e4 100644 (file)
 # -------------------------------------------------------------------------
 #
 
-import itertools
 from collections import defaultdict
+import itertools
 
-from dateutil import tz
 from dateutil.parser import parse
+from dateutil import tz
 
 
 def tuples_to_multi_val_dict(kvw_tuples, colnums=(0, 1)):
     """Given a list of k,v tuples, get a dictionary of the form k -> [v1,v2,...,vn]
+
     :param kvw_tuples: list of k,v,w tuples (e.g. [(k1,v1,a1), (k2,v2,a2), (k1,v3,a3), (k1,v4,a4)]
     :param colnums: column numbers
     :return: a dict of str:set, something like {k1: {v1, v3, v4}, k2: {v2}} or {k1: {a1, a3, a4}, k2: {a2}}
@@ -38,6 +39,7 @@ def tuples_to_multi_val_dict(kvw_tuples, colnums=(0, 1)):
 
 def tuples_to_dict(kvw_tuples, colnums=(0, 1)):
     """Given a list of k,v tuples, get a dictionary of the form k -> v
+
     :param kvw_tuples: list of k,v,w tuples (e.g. [(k1,v1,a1), (k2,v2,a2), (k3,v3,a3), (k1,v4,a4)]
     :param colnums: column numbers
     :return: a dict; something like {k1: v4, k2: v2, k3: v3} (note, k1 is repeated, so last val is retained)
@@ -46,13 +48,17 @@ def tuples_to_dict(kvw_tuples, colnums=(0, 1)):
 
 
 def utc_time_from_ts(timestamp):
-    """Return corresponding UTC timestamp for a given ISO timestamp (or anything that parse accepts)"""
+    """Return corresponding UTC timestamp for a given ISO timestamp (or anything that parse accepts)
+
+    """
     return parse(timestamp).astimezone(tz.tzutc()).strftime('%Y-%m-%d %H:%M:%S')
 
 
-def list_flatten(l):
-    """Flatten a complex nested list of nested lists into a flat list"""
-    return itertools.chain(*[list_flatten(j) if isinstance(j, list) else [j] for j in l])
+def list_flatten(_l):
+    """Flatten a complex nested list of nested lists into a flat list
+
+    """
+    return itertools.chain(*[list_flatten(j) if isinstance(j, list) else [j] for j in _l])
 
 
 text_to_symbol = {
@@ -60,3 +66,10 @@ text_to_symbol = {
     'less': "<",
     'equal': "="
 }
+
+
+def decode_data(data):
+    """Decode bytes to string
+
+    """
+    return data.decode(encoding='utf-8') if isinstance(data, bytes) else data
index 14b726d..c74a161 100644 (file)
@@ -1,5 +1,5 @@
 # -------------------------------------------------------------------------
-#   Copyright (c) 2019 AT&T Intellectual Property
+#   Copyright (c) 2020 AT&T Intellectual Property
 #
 #   Licensed under the Apache License, Version 2.0 (the "License");
 #   you may not use this file except in compliance with the License.
 
 import socket
 import threading
+import time
 import uuid
 
+from flask import g
 from onaplogging.mdcContext import MDC
 
+EMPTY = "EMPTY"
+
+DATE_FMT = '%Y-%m-%dT%H:%M:%S'
+
 
 def default_server_info():
+    """Populate the server and serverIPAddress MDC fields
+
+    """
     # If not set or purposely set = None, then set default
     if MDC.get('server') is None:
         try:
             server = socket.getfqdn()
-        except Exception as err:
+        except Exception:
             try:
                 server = socket.gethostname()
-            except Exception as err:
+            except Exception:
                 server = ''
         MDC.put('server', server)
     if MDC.get('serverIPAddress') is None:
@@ -43,26 +52,104 @@ def default_server_info():
 
 
 def default_mdc():
-    MDC.put('instanceUUID', uuid.uuid1())
+    """Populate default MDC fields
+
+    """
+    MDC.put('instanceUUID', generate_uuid())
+    MDC.put('InvocationID', generate_uuid())
     MDC.put('serviceName', 'OOF_OSDF')
     MDC.put('threadID', threading.currentThread().getName())
     default_server_info()
-    MDC.put('requestID', 'N/A')
+    MDC.put('requestID', generate_uuid())
     MDC.put('partnerName', 'N/A')
+    MDC.put('entryTimestamp', get_time())
 
 
 def mdc_from_json(request_json):
-    default_mdc()
+    """Populate MDC fields given a request in json format
+
+    """
+    if MDC.get("instanceUUID") is None:
+        default_mdc()
     MDC.put('requestID', get_request_id(request_json))
-    MDC.put('partnerName', request_json['requestInfo']['sourceId'])
+    MDC.put('partnerName', get_partner_name(request_json))
+
+
+def populate_default_mdc(request):
+    """Populate default MDC fields given the request
+
+    """
+    if MDC.get("instanceUUID") is None:
+        default_mdc()
+        g.request_start = time.process_time()
+        g.empty_value = "EMPTY"
+        g.request_id = MDC.get("requestID")
+    MDC.put('serviceName', request.path)
+    MDC.put('IPAddress', request.headers.get('X-Forwarded-For', request.remote_addr))
+
+
+def populate_mdc(request):
+    """Populate MDC fields from the request headers
+
+    """
+    populate_default_mdc(request)
+    req_id = request.headers.get('X-ONAP-RequestID', g.empty_value)
+    request_json = request.get_json()
+    if req_id == g.empty_value:
+        req_id = get_request_id(request_json)
+    g.request_id = req_id
+    MDC.put('requestID', req_id)
+    MDC.put('partnerName', get_partner_name(request_json))
 
 
 def get_request_id(request_json):
+    """Get the request_id from the request
+
+    """
     request_id = request_json['requestInfo'].get('requestId')
     if not request_id:
         request_id = request_json['requestInfo'].get('requestID')
     return request_id
 
 
+def generate_uuid():
+    """Generate a unique uuid
+
+    """
+    return f'{uuid.uuid1()}'
+
+
+def get_partner_name(request_json):
+    """Get the partnerName
+
+    """
+    partner_name = EMPTY
+    if 'requestInfo' in request_json:
+        partner_name = request_json['requestInfo'].get('sourceId', EMPTY)
+    return partner_name
+
+
 def clear_mdc():
+    """Clear MDC
+
+    """
     MDC.clear()
+
+
+def get_time():
+    """Generate the invocation time string
+
+    The date format is %Y-%m-%dT%H:%M:%S.sss
+    """
+    ct = time.time()
+    lt = time.gmtime(ct)
+    msec = int((ct - int(ct)) * 1000)
+    return f'{time.strftime(DATE_FMT, lt)}.{msec:0>3}'
+
+
+def set_error_details(code, desc):
+    """Set the errorCode and errorDescription MDC fields
+
+    """
+    MDC.put('errorCode', code)
+    MDC.put('errorDescription', desc)
index fd87333..b0492f2 100644 (file)
 import json
 import traceback
 
+from flask import Flask
+from flask import g
+from flask import Response
 import mysql.connector
-from flask import g, Flask, Response
 
 from osdf.config.base import osdf_config
-from osdf.logging.osdf_logging import debug_log, error_log
+from osdf.logging.osdf_logging import debug_log
+from osdf.logging.osdf_logging import error_log
 from osdf.operation.exceptions import BusinessException
+from osdf.utils.data_conversion import decode_data
 
 
 def init_db():
@@ -33,20 +37,22 @@ def init_db():
 
 
 def get_db():
-    """Opens a new database connection if there is none yet for the
-        current application context. 
+    """Opens a new database connection if there is none yet for the current application context.
+
     """
     if not hasattr(g, 'pg'):
         properties = osdf_config['deployment']
-        host, db_port, db = properties["osdfDatabaseHost"], properties["osdfDatabasePort"], \
-                            properties.get("osdfDatabaseSchema")
+        host, db_port, db = properties["osdfDatabaseHost"], properties["osdfDatabasePort"], properties.get(
+            "osdfDatabaseSchema")
         user, password = properties["osdfDatabaseUsername"], properties["osdfDatabasePassword"]
         g.pg = mysql.connector.connect(host=host, port=db_port, user=user, password=password, database=db)
     return g.pg
 
 
 def close_db():
-    """Closes the database again at the end of the request."""
+    """Closes the database again at the end of the request.
+
+    """
     if hasattr(g, 'pg'):
         g.pg.close()
 
@@ -109,7 +115,7 @@ def build_model_dict(resp_data, content_needed=True):
     resp = {'modelId': resp_data[0], 'description': resp_data[2] if resp_data[2] else '',
             'solver': resp_data[3]}
     if content_needed:
-        resp.update({'modelContent': resp_data[1]})
+        resp.update({'modelContent': decode_data(resp_data[1])})
     return resp
 
 
@@ -124,7 +130,6 @@ def delete_model_data(model_id):
     with app.app_context():
         try:
             debug_log.debug("deleting model data given model_id = {}".format(model_id))
-            d = dict();
             connection = get_db()
             cursor = connection.cursor(buffered=True)
             query = "delete from optim_model_data WHERE model_id = %s"
@@ -146,10 +151,11 @@ def get_model_data(model_id):
     with app.app_context():
         try:
             debug_log.debug("getting model data given model_id = {}".format(model_id))
-            d = dict();
+            d = dict()
             connection = get_db()
             cursor = connection.cursor(buffered=True)
-            query = "SELECT model_id, model_content, description, solver_type  FROM optim_model_data WHERE model_id = %s"
+            query = "SELECT model_id, model_content, description, " \
+                    "solver_type  FROM optim_model_data WHERE model_id = %s"
             values = (model_id,)
             cursor.execute(query, values)
             if cursor is None:
@@ -194,7 +200,7 @@ def get_all_models():
             connection = get_db()
             cursor = connection.cursor(buffered=True)
             query = "SELECT model_id, model_content, description, solver_type  FROM optim_model_data"
-    
+
             cursor.execute(query)
             if cursor is None:
                 return 400, "FAILED"
index 4a8788e..b303bbf 100644 (file)
 from flask import Response
 
 from osdf.operation.exceptions import BusinessException
-from .model_api import get_model_data
-from .models.api.optim_request import OptimizationAPI
-from .solvers.mzn.mzn_solver import solve as mzn_solve
-from .solvers.py.py_solver import solve as py_solve
+from osdf.utils.data_conversion import decode_data
+from runtime.model_api import get_model_data
+from runtime.models.api.optim_request import OptimizationAPI
+from runtime.solvers.mzn.mzn_solver import solve as mzn_solve
+from runtime.solvers.py.py_solver import solve as py_solve
 
 
 def is_valid_optim_request(request_json):
@@ -69,7 +70,7 @@ def get_model_content(request_json):
     if model_id:
         status, data = get_model_data(model_id)
         if status == 200:
-            model_content = data[1]
+            model_content = decode_data(data[1])
             solver = data[3]
         else:
             raise BusinessException('model_id [{}] not found in the model database'.format(model_id))
index cf002e7..f3daa2b 100644 (file)
 # -------------------------------------------------------------------------
 #
 
-import json
 from datetime import datetime
+import json
 
-from pymzn import Status, minizinc, cbc, gecode, chuffed, or_tools
+from pymzn import cbc
+from pymzn import chuffed
+from pymzn import gecode
+from pymzn import minizinc
+from pymzn import or_tools
+from pymzn import Status
 
 from osdf.utils.file_utils import delete_file_folder
 
@@ -47,6 +52,10 @@ def map_status(status):
 
 
 def solve(request_json, mzn_content):
+    """Given the request and minizinc content, translates the json request to the format minizinc understands.
+
+    return: returns the optimized solution.
+    """
     req_info = request_json['requestInfo']
     opt_info = request_json['optimInfo']
     try:
@@ -71,6 +80,9 @@ def solve(request_json, mzn_content):
 
 
 def mzn_solver(mzn_content, opt_info):
+    """Calls the minizinc optimizer.
+
+    """
     args = opt_info['solverArgs']
     solver = get_mzn_solver(args.pop('solver'))
     mzn_opts = dict()
@@ -85,18 +97,36 @@ def mzn_solver(mzn_content, opt_info):
 
 
 def persist_opt_data(opt_info):
+    """Persist the opt data, if included as part of the request.
+
+    return: file_name path of the optim_data
+            returns None if no optData is part of the request
+    """
+    file_name = None
+    if 'optData' in opt_info:
+        if opt_info['optData'].get('json'):
+            data_content = json.dumps(opt_info['optData']['json'])
+            file_name = '/tmp/optim_engine_{}.json'.format(datetime.timestamp(datetime.now()))
+            persist_data(data_content, file_name)
+        elif opt_info['optData'].get('text'):
+            data_content = opt_info['optData']['text']
+            file_name = '/tmp/optim_engine_{}.dzn'.format(datetime.timestamp(datetime.now()))
+            persist_data(data_content, file_name)
+    return file_name
+
 
-    if opt_info['optData'].get('json'):
-        data_content = json.dumps(opt_info['optData']['json'])
-        file_name = '/tmp/optim_engine_{}.json'.format(datetime.timestamp(datetime.now()))
-    elif opt_info['optData'].get('text'):
-        data_content = opt_info['optData']['text']
-        file_name = '/tmp/optim_engine_{}.dzn'.format(datetime.timestamp(datetime.now()))
+def persist_data(data_content, file_name):
+    """Save the dzn data into a file
 
+    """
     with open(file_name, "wt") as data:
         data.write(data_content)
-    return file_name
 
 
 def get_mzn_solver(solver):
+    """Returns a solver type object for minizinc optimizers
+
+    solver: solver that is part of the request
+    return: solver mapped object
+    """
     return solver_dict.get(solver)
index 0b8815f..ad0de21 100644 (file)
@@ -3,19 +3,19 @@ disable_existing_loggers: True
 
 loggers:
   error:
-    handlers: [error_handler]
+    handlers: [error_handler, console_handler]
     level: "WARN"
     propagate: True
   debug:
-    handlers: [debug_handler]
+    handlers: [debug_handler, console_handler]
     level: "DEBUG"
     propagate: True
   metrics:
-    handlers: [metrics_handler]
+    handlers: [metrics_handler, console_handler]
     level: "INFO"
     propagate: True
   audit:
-    handlers: [audit_handler]
+    handlers: [audit_handler, console_handler]
     level: "INFO"
     propagate: True
 handlers:
@@ -59,37 +59,42 @@ handlers:
     utc: True
     delay: False
     backupCount: 10
+  console_handler:
+    level: "DEBUG"
+    class: "logging.StreamHandler"
+    formatter: "metricsFormat"
+
 formatters:
   standard:
     format: "%(asctime)s|||||%(name)s||%(thread)||%(funcName)s||%(levelname)s||%(message)s"
   debugFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {threadID} {serverName} {serviceName} {instanceUUID} {upperLogLevel} {severity} {serverIPAddress} {server} {IPAddress} {className} {timer} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "%(asctime)s.%(msecs)03d+00:00|{requestID}|%(threadName)s|{server}|%(levelname)s|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   errorFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {threadID} {serviceName} {partnerName} {targetEntity} {targetServiceName} {errorCode} {errorDescription} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "%(asctime)s.%(msecs)03d+00:00|{requestID}|%(threadName)s|{serviceName}|{partnerName}\
+    |{targetEntity}|{targetServiceName}|%(levelname)s|{errorCode}|{errorDescription}|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   auditFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {serviceInstanceID} {threadID} {serverName} {serviceName} {partnerName} {statusCode} {responseCode} {responseDescription} {instanceUUID} {upperLogLevel} {severity} \
-             {serverIPAddress} {timer} {server} {IPAddress} {className} {unused} {processKey} {customField1} {customField2} {customField3} {customField4} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "{entryTimestamp}+00:00|%(asctime)s.%(msecs)03d+00:00|{requestID}|{serviceInstanceID}\
+        |%(threadName)s|{server}|{serviceName}|{partnerName}|{statusCode}|{responseCode}|{responseDescription}\
+        |{instanceUUID}|%(levelname)s|{severity}|{serverIPAddress}|{timer}|{server}|{IPAddress}||{unused}\
+        |{processKey}|{customField1}|{customField2}|{customField3}|{customField4}|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   metricsFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {serviceInstanceID} {threadID} {serverName} {serviceName} {partnerName} \
-             {targetEntity} {targetServiceName} {statusCode} {responseCode} {responseDescription} \
-             {instanceUUID} {upperLogLevel} {severity} {serverIPAddress} {timer} {server} {IPAddress} \
-             {className} {unused} {processKey} {targetVirtualEntity} {customField1} {customField2} \
-             {customField3} {customField4} {detailMessage}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
-
+    format: "%(mdc)s"
+    datefmt: "%Y-%m-%dT%H:%M:%S"
+    mdcfmt: "{entryTimestamp}+00:00|%(asctime)s.%(msecs)03d+00:00|{requestID}|{serviceInstanceID}\
+        |%(threadName)s|{server}|{serviceName}|{partnerName}|{targetEntity}|{targetServiceName}|{statusCode}|{responseCode}|{responseDescription}\
+        |{instanceUUID}|%(levelname)s|{severity}|{serverIPAddress}|{timer}|{server}|{IPAddress}||{unused}\
+        |{processKey}|{targetVirtualEntity}|{customField1}|{customField2}|{customField3}|{customField4}|%(message)s"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
   mdcFormat:
-    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
-    mdcfmt: "{requestID} {invocationID} {serviceName} {serviceIP}"
-    datefmt: "%Y-%m-%d %H:%M:%S"
-    (): onaplogging.mdcformatter.MDCFormatter
+    format: "%(asctime)s.%(msecs)03d+00:00|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {invocationID} {serviceName} {serverIPAddress}"
+    (): osdf.logging.oof_mdc_formatter.OOFMDCFormatter
index 50689dd..c8fa3ae 100755 (executable)
 # -------------------------------------------------------------------------
 #
 import unittest
-import json
-import yaml
+from unittest import mock
 
 from osdf.logging import osdf_logging as L1
-from osdf.logging.osdf_logging import OOF_OSDFLogMessageHelper as MH
-from osdf.logging.osdf_logging import OOF_OSDFLogMessageFormatter as formatter
-from unittest import mock
+from osdf.logging.osdf_logging import OOFOSDFLogMessageFormatter as formatter
+from osdf.logging.osdf_logging import OOFOSDFLogMessageHelper as MH
 
 
 class TestOSDFLogging(unittest.TestCase):
@@ -46,7 +44,7 @@ class TestOSDFLogging(unittest.TestCase):
         self.F = formatter
 
     def test_format_exception(self):
-        res = L1.format_exception(Exception("Some error"))
+        L1.format_exception(Exception("Some error"))
 
     def test_accepted_valid_request(self):
         res = formatter.accepted_valid_request(self.req_id, self.request)
@@ -91,7 +89,7 @@ class TestOSDFLogging(unittest.TestCase):
     def test_error_calling_back(self):
         res = formatter.error_calling_back(self.service_name, self.callback_url, self.err)
         assert res.startswith("Error while posting result to callback URL")
-        
+
     def test_calling_back(self):
         res = formatter.calling_back(self.req_id, self.callback_url)
         assert res.startswith("Posting result to callback URL")
@@ -167,4 +165,3 @@ class TestOSDFLogging(unittest.TestCase):
 
 if __name__ == "__main__":
     unittest.main()
-
index d1f6d7e..e4f12e6 100644 (file)
 #
 
 import unittest
+from unittest import TestCase
+from unittest import mock
+from unittest.mock import patch
+
+from requests import Request
+from requests import RequestException
+from schematics.exceptions import DataError
 
 from osdf.apps import baseapp
+from osdf.apps.baseapp import app
 from osdf.operation.exceptions import BusinessException
-from requests import Request, RequestException
-from schematics.exceptions import DataError
-from unittest import mock, TestCase
-from unittest.mock import patch
 
 
 class TestOSDFApp(TestCase):
 
     def setUp(self):
-        self.patcher_g = patch('osdf.apps.baseapp.g', return_value={'request_id':'DUMMY-REQ'})
-        self.Mock_g = self.patcher_g.start()
+        with app.app_context():
+            self.patcher_g = patch('osdf.apps.baseapp.g', return_value={'request_id': 'DUMMY-REQ'})
+            self.Mock_g = self.patcher_g.start()
         # self.patcher2 = patch('package.module.Class2')
         # self.MockClass2 = self.patcher2.start()
 
@@ -45,7 +50,7 @@ class TestOSDFApp(TestCase):
         e.response.content = "Some request exception occurred"
         # request().raise_for_status.side_effect = e
         return e
+
     def test_handle_business_exception(self):
         e = BusinessException("Business Exception Description")
         resp = baseapp.handle_business_exception(e)
@@ -67,9 +72,8 @@ class TestOSDFApp(TestCase):
         assert resp.status_code == 500
 
     def test_get_options_default(self):
-        opts = baseapp.get_options(["PROG"])  # ensure nothing breaks
+        baseapp.get_options(["PROG"])  # ensure nothing breaks
 
 
 if __name__ == "__main__":
     unittest.main()
-