[PMSH] Replace own logging implementation with pylog 43/107543/9
author ERIMROB <robertas.rimkus@est.tech>
Tue, 12 May 2020 11:56:56 +0000 (12:56 +0100)
committer ERIMROB <robertas.rimkus@est.tech>
Fri, 5 Jun 2020 09:11:06 +0000 (10:11 +0100)
Signed-off-by: ERIMROB <robertas.rimkus@est.tech>
Issue-ID: DCAEGEN2-2155
Change-Id: I670c4fff8029a73075b651c2afe6237c08cf907c

22 files changed:
components/pm-subscription-handler/Changelog.md
components/pm-subscription-handler/Dockerfile
components/pm-subscription-handler/log_config.yaml [new file with mode: 0755]
components/pm-subscription-handler/pmsh_service/mod/__init__.py
components/pm-subscription-handler/pmsh_service/mod/aai_client.py
components/pm-subscription-handler/pmsh_service/mod/aai_event_handler.py
components/pm-subscription-handler/pmsh_service/mod/exit_handler.py
components/pm-subscription-handler/pmsh_service/mod/network_function.py
components/pm-subscription-handler/pmsh_service/mod/pmsh_logging.py [deleted file]
components/pm-subscription-handler/pmsh_service/mod/pmsh_utils.py
components/pm-subscription-handler/pmsh_service/mod/policy_response_handler.py
components/pm-subscription-handler/pmsh_service/mod/subscription.py
components/pm-subscription-handler/pmsh_service/mod/subscription_handler.py
components/pm-subscription-handler/pmsh_service/pmsh_service_main.py
components/pm-subscription-handler/setup.py
components/pm-subscription-handler/tests/log_config.yaml [new file with mode: 0755]
components/pm-subscription-handler/tests/test_aai_service.py
components/pm-subscription-handler/tests/test_controller.py
components/pm-subscription-handler/tests/test_network_function.py
components/pm-subscription-handler/tests/test_pmsh_utils.py
components/pm-subscription-handler/tests/test_subscription.py
components/pm-subscription-handler/tests/test_subscription_handler.py

index 2ec36fb..a624761 100755 (executable)
@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 ### Changed
 * Added new API endpoint to fetch all Subscription data (DCAEGEN2-2154)
 * Added support for config-binding-docker module in PMSH config fetch (DCAEGEN2-2156)
+* Replaced logging implementation with onappylog module (DCAEGEN2-2155)
 
 ## [1.0.3]
 ### Fixed
index 8eed60b..270b97a 100644 (file)
@@ -25,7 +25,8 @@ ENV PMSHUSER=pmsh \
     PATH=/usr/local/lib/python3.7/bin:$PATH:$APPDIR/bin \
     PYTHONPATH=/usr/local/lib/python3.7/site-packages:./mod:./:$PYTHONPATH:$APPDIR/bin \
     REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt \
-    LOGS_PATH="/var/log/ONAP/dcaegen2/services/pmsh"
+    LOGS_PATH="/var/log/ONAP/dcaegen2/services/pmsh" \
+    LOGGER_CONFIG=/opt/app/pmsh/log_config.yaml
 
 WORKDIR $APPDIR
 
@@ -38,6 +39,7 @@ RUN addgroup --system $PMSHUSER && adduser --ingroup $PMSHUSER --system $PMSHUSE
 COPY setup.py ./
 COPY requirements.txt ./
 COPY ./pmsh_service ./bin/
+COPY log_config.yaml /opt/app/pmsh/
 
     # run the pip install
 RUN pip install --upgrade pip && \
diff --git a/components/pm-subscription-handler/log_config.yaml b/components/pm-subscription-handler/log_config.yaml
new file mode 100755 (executable)
index 0000000..971c188
--- /dev/null
@@ -0,0 +1,27 @@
+version: 1\r
+\r
+disable_existing_loggers: true\r
+\r
+loggers:\r
+  onap_logger:\r
+    level: INFO\r
+    handlers: [onap_log_handler, stdout_handler]\r
+    propagate: false\r
+handlers:\r
+  onap_log_handler:\r
+    class: logging.handlers.RotatingFileHandler\r
+    filename: /var/log/ONAP/dcaegen2/services/pmsh/application.log\r
+    mode: a\r
+    maxBytes: 10000000\r
+    backupCount: 10\r
+    formatter: mdcFormatter\r
+  stdout_handler:\r
+    class: logging.StreamHandler\r
+    formatter: mdcFormatter\r
+formatters:\r
+  mdcFormatter:\r
+    format: '%(asctime)s | %(threadName)s | %(thread)d | %(levelname)s | %(module)s\r
+      | %(funcName)s | %(mdc)s | %(message)s'\r
+    mdcfmt: '{ServiceName} | {RequestID} | {InvocationID}'\r
+    datefmt: '%Y-%m-%dT%H:%M:%S%z'\r
+    (): onaplogging.mdcformatter.MDCFormatter\r
index 5c0a514..316687c 100644 (file)
 #
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=====================================================
+import logging as logging
 import os
+import pathlib
 from urllib.parse import quote
 
 from connexion import App
 from flask_sqlalchemy import SQLAlchemy
-
-import mod.pmsh_logging as logger
+from onaplogging import monkey
+from onaplogging.mdcContext import MDC
+from ruamel.yaml import YAML
 
 db = SQLAlchemy()
 basedir = os.path.abspath(os.path.dirname(__file__))
 _connexion_app = None
+logger = logging.getLogger('onap_logger')
 
 
 def _get_app():
@@ -43,7 +47,7 @@ def launch_api_server(app_config):
 
 
 def create_app():
-    logger.create_loggers(os.getenv('LOGS_PATH'))
+    create_logger()
     connex_app = _get_app()
     app = connex_app.app
     app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
@@ -53,6 +57,31 @@ def create_app():
     return app
 
 
+def create_logger():
+    config_file_path = os.getenv('LOGGER_CONFIG')
+    update_config(config_file_path)
+    monkey.patch_loggingYaml()
+    logging.config.yamlConfig(filepath=config_file_path,
+                              watchDog=os.getenv('DYNAMIC_LOGGER_CONFIG', True))
+    old_record = logging.getLogRecordFactory()
+
+    def augment_record(*args, **kwargs):
+        new_record = old_record(*args, **kwargs)
+        new_record.mdc = MDC.result()
+        return new_record
+
+    logging.setLogRecordFactory(augment_record)
+
+
+def update_config(config_file_path):
+    config_yaml = YAML()
+    config_file = pathlib.Path(config_file_path)
+    data = config_yaml.load(config_file)
+    data['handlers']['onap_log_handler']['filename'] = \
+        f'{os.getenv("LOGS_PATH")}/application.log'
+    config_yaml.dump(data, config_file)
+
+
 def get_db_connection_url():
     pg_host = os.getenv('PMSH_PG_URL')
     pg_user = os.getenv('PMSH_PG_USERNAME')
index 5e71da4..371fdb0 100755 (executable)
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=====================================================
 import json
-import uuid
 from os import environ
 
 import requests
 from requests.auth import HTTPBasicAuth
 
-import mod.pmsh_logging as logger
+from mod import logger
+from mod.pmsh_utils import mdc_handler
 from mod.network_function import NetworkFunction, NetworkFunctionFilter
 from mod.subscription import Subscription
 
@@ -50,7 +50,8 @@ def get_pmsh_subscription_data(cbs_data):
     return sub, nfs
 
 
-def _get_all_aai_nf_data():
+@mdc_handler
+def _get_all_aai_nf_data(**kwargs):
     """
     Return queried nf data from the AAI service.
 
@@ -61,10 +62,14 @@ def _get_all_aai_nf_data():
     try:
         session = requests.Session()
         aai_endpoint = f'{_get_aai_service_url()}{"/aai/v16/query"}'
+        logger.info('Fetching XNFs from AAI.')
         headers = {'accept': 'application/json',
                    'content-type': 'application/json',
                    'x-fromappid': 'dcae-pmsh',
-                   'x-transactionid': str(uuid.uuid1())}
+                   'x-transactionid': kwargs['request_id'],
+                   'InvocationID': kwargs['invocation_id'],
+                   'RequestID': kwargs['request_id']
+                   }
         json_data = """
                     {'start':
                         ['network/pnfs',
@@ -77,8 +82,10 @@ def _get_all_aai_nf_data():
         response.raise_for_status()
         if response.ok:
             nf_data = json.loads(response.text)
+            logger.info('Successfully fetched XNFs from AAI')
+            logger.debug(f'XNFs from AAI: {nf_data}')
     except Exception as e:
-        logger.debug(e)
+        logger.error(f'Failed to get XNFs from AAI: {e}')
     return nf_data
 
 
@@ -97,7 +104,7 @@ def _get_aai_service_url():
         aai_ssl_port = environ['AAI_SERVICE_PORT']
         return f'https://{aai_service}:{aai_ssl_port}'
     except KeyError as e:
-        logger.debug(f'Failed to get AAI env vars: {e}')
+        logger.error(f'Failed to get AAI env vars: {e}')
         raise
 
 
@@ -126,6 +133,6 @@ def _filter_nf_data(nf_data, nf_filter):
                     nf_name=nf['properties'].get(name_identifier),
                     orchestration_status=orchestration_status))
     except KeyError as e:
-        logger.debug(f'Failed to parse AAI data: {e}')
+        logger.error(f'Failed to parse AAI data: {e}')
         raise
     return nf_set
index 07de7d6..5aebb92 100755 (executable)
@@ -19,7 +19,7 @@
 import json
 from enum import Enum
 
-from mod import pmsh_logging as logger
+from mod import logger
 from mod.network_function import NetworkFunction, NetworkFunctionFilter
 from mod.subscription import AdministrativeState
 
@@ -66,11 +66,11 @@ def process_aai_events(mr_sub, subscription, mr_pub, app, app_conf):
 
 def _process_event(action, new_status, xnf_name, subscription, mr_pub, app_conf):
     if action == AAIEvent.UPDATE.value:
-        logger.debug(f'Update event found for network function {xnf_name}')
+        logger.info(f'Update event found for network function {xnf_name}')
         local_xnf = NetworkFunction.get(xnf_name)
 
         if local_xnf is None:
-            logger.debug(f'Activating subscription for network function {xnf_name}')
+            logger.info(f'Activating subscription for network function {xnf_name}')
             subscription.administrativeState = AdministrativeState.UNLOCKED.value
             subscription.process_subscription([NetworkFunction(
                 nf_name=xnf_name, orchestration_status=new_status)], mr_pub, app_conf)
@@ -78,9 +78,9 @@ def _process_event(action, new_status, xnf_name, subscription, mr_pub, app_conf)
             logger.debug(f"Update Event for network function {xnf_name} will not be processed "
                          f" as it's state is set to {local_xnf.orchestration_status}.")
     elif action == AAIEvent.DELETE.value:
-        logger.debug(f'Delete event found for network function {xnf_name}')
+        logger.info(f'Delete event found for network function {xnf_name}')
         NetworkFunction.delete(nf_name=xnf_name)
-        logger.debug(f'{xnf_name} successfully deleted.')
+        logger.info(f'{xnf_name} successfully deleted.')
 
 
 def _aai_event_exists(aai_events):
index 3cb05da..01cb8dc 100755 (executable)
@@ -16,7 +16,7 @@
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=====================================================
 
-from mod.pmsh_utils import logger
+from mod import logger
 from mod.subscription import AdministrativeState
 
 
@@ -35,6 +35,7 @@ class ExitHandler:
         self.subscription_handler = subscription_handler
 
     def __call__(self, sig_num, frame):
+        logger.info('Graceful shutdown of PMSH initiated.')
         logger.debug(f'ExitHandler was called with signal number: {sig_num}.')
         current_sub = self.subscription_handler.current_sub
         if current_sub and current_sub.administrativeState == AdministrativeState.UNLOCKED.value:
index 0663be0..aa39bf2 100755 (executable)
@@ -19,7 +19,7 @@
 import re
 from enum import Enum
 
-from mod import pmsh_logging as logger, db
+from mod import logger, db
 from mod.api.db_models import NetworkFunctionModel
 
 
@@ -53,7 +53,9 @@ class NetworkFunction:
                                           orchestration_status=self.orchestration_status)
             db.session.add(new_nf)
             db.session.commit()
+            logger.info(f'Network Function {new_nf.nf_name} successfully created.')
             return new_nf
+
         else:
             logger.debug(f'Network function {existing_nf.nf_name} already exists,'
                          f' returning this network function..')
diff --git a/components/pm-subscription-handler/pmsh_service/mod/pmsh_logging.py b/components/pm-subscription-handler/pmsh_service/mod/pmsh_logging.py
deleted file mode 100644 (file)
index e0a7e1b..0000000
+++ /dev/null
@@ -1,287 +0,0 @@
-# ============LICENSE_START===================================================
-#  Copyright (C) 2019-2020 Nordix Foundation.
-# ============================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-# ============LICENSE_END=====================================================
-import datetime
-import logging as log
-import sys
-from logging.handlers import RotatingFileHandler
-from os import makedirs
-
-# These loggers will be overwritten with EELF logging when running in Docker
-_AUDIT_LOGGER = log.getLogger("defaultlogger")
-_ERROR_LOGGER = log.getLogger("defaultlogger")
-_METRICS_LOGGER = log.getLogger("defaultlogger")
-
-# Set up debug logger
-_DEBUG_LOGGER = log.getLogger("defaultlogger")
-
-
-def _create_logger(name, logfile):
-    """
-    Create a RotatingFileHandler and a streamhandler for stdout
-    https://docs.python.org/3/library/logging.handlers.html
-    what's with the non-pythonic naming in these stdlib methods? Shameful.
-    """
-    logger = log.getLogger(name)
-    file_handler = RotatingFileHandler(logfile, maxBytes=10000000,  # 10 meg with one backup..,
-                                       backupCount=2)
-    formatter = log.Formatter("%(message)s")
-    file_handler.setFormatter(formatter)
-    logger.setLevel(log.DEBUG)
-    stdout_handler = log.StreamHandler(sys.stdout)
-    logger.addHandler(file_handler)
-    logger.addHandler(stdout_handler)
-    return logger
-
-
-# Public
-
-def get_module_logger(mod_name):
-    """
-    To use this, do logger = get_module_logger(__name__)
-    """
-    logger = log.getLogger(mod_name)
-    handler = log.StreamHandler()
-    formatter = log.Formatter("%(asctime)s "
-                              "[%(name)-12s] "
-                              "%(levelname)-8s "
-                              "%(message)s")
-    handler.setFormatter(formatter)
-    logger.addHandler(handler)
-    logger.setLevel(log.DEBUG)
-    return logger
-
-
-def create_loggers(logs_path=''):
-    """
-    Public method to set the global logger, launched from Run
-    This is *not* launched during unit testing, so unit tests do not
-    create/write log files
-    """
-    makedirs(logs_path, exist_ok=True)
-
-    # create the audit log
-    aud_file = logs_path + "/audit.log"
-    open(aud_file, "a").close()  # this is like "touch"
-    global _AUDIT_LOGGER
-    _AUDIT_LOGGER = _create_logger("pmsh_service_audit", aud_file)
-
-    # create the error log
-    err_file = logs_path + "/error.log"
-    open(err_file, "a").close()  # this is like "touch"
-    global _ERROR_LOGGER
-    _ERROR_LOGGER = _create_logger("pmsh_service_error", err_file)
-
-    # create the metrics log
-    met_file = logs_path + "/metrics.log"
-    open(met_file, "a").close()  # this is like "touch"
-    global _METRICS_LOGGER
-    _METRICS_LOGGER = _create_logger("pmsh_service_metrics", met_file)
-
-    # create the debug log
-    debug_file = logs_path + "/debug.log"
-    open(debug_file, "a").close()  # this is like "touch"
-    global _DEBUG_LOGGER
-    _DEBUG_LOGGER = _create_logger("pmsh_service_debug", debug_file)
-
-
-def utc():
-    """gets current time in utc"""
-    return datetime.datetime.utcnow()
-
-
-def debug(msg="n/a"):
-    """
-    This can be extended/modified to suit pmsh needs
-    """
-    ets = utc()
-
-    _DEBUG_LOGGER.debug(
-        "{ets}|{msg}".format(
-            ets=ets.isoformat(),
-            msg=msg,
-        )
-    )
-
-
-"""
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-These loggers can be modified to suit the pmsh functionality
-"""
-
-
-def audit(raw_request, bts, xer, rcode, calling_mod, msg="n/a"):
-    """
-    write an EELF error record per
-    'https://wiki.onap.org/download/attachments/1015849/
-    ONAP%20application%20logging%20guidelines.pdf?api=v2'
-
-    %The audit fields implemented:
-
-    1 BeginTimestamp        Implemented (bts)
-    2 EndTimestamp          Auto Injected when this is called
-    3 RequestID             Implemented (xer)
-    5 threadId              n/a
-    7 serviceName           Implemented (from Req)
-    9 StatusCode            Auto injected based on rcode
-    10 ResponseCode         Implemented (rcode)
-    13 Category log level - all audit records are INFO.
-    15 Server IP address    Implemented (from Req)
-    16 ElapsedTime          Auto Injected (milliseconds)
-    17 Server               This is running in a Docker container so this is
-                            not applicable, my HOSTNAME is always
-                            "config_binding_service"
-    18 ClientIPaddress      Implemented (from Req)
-    19 class name           Implemented (mod), though docs say OOP,
-                            I am using the python  module here
-    20 Unused               ...implemented....
-    21-25 Custom            n/a
-    26 detailMessage        Implemented (msg)
-
-    Not implemented
-    4 serviceInstanceID - ?
-    6 physical/virtual server name (Optional)
-    8 PartnerName - nothing in the request tells me this
-    11 Response Description - the CBS follows standard HTTP
-                              error codes so look them up
-    12 instanceUUID - Optional
-    14 Severity (Optional)
-    """
-    ets = utc()
-
-    _AUDIT_LOGGER.info(
-        "{bts}|{ets}|{xer}||n/a||{path}||{status}|{rcode}|||INFO||{servip}|{et}|"
-        "config_binding_service|{clientip}|{calling_mod}|||||||{msg}".format(
-            bts=bts.isoformat(),
-            ets=ets.isoformat(),
-            xer=xer,
-            rcode=rcode,
-            path=raw_request.path.split("/")[1],
-            status="COMPLETE" if rcode < 400 else "ERROR",
-            servip=raw_request.host.split(":")[0],
-            et=int((ets - bts).microseconds / 1000),
-            clientip=raw_request.remote_addr,
-            calling_mod=calling_mod,
-            msg=msg,
-        )
-    )
-
-
-def error(raw_request, xer, severity, ecode, tgt_entity="n/a",
-          tgt_path="n/a", msg="n/a", adv_msg="n/a"):
-    """
-    write an EELF error record per
-    'https://wiki.onap.org/download/attachments/1015849/
-    ONAP%20application%20logging%20guidelines.pdf?api=v2'
-
-    the error fields implemented:
-
-    1 Timestamp          Auto Injected when this is called
-    2 RequestID          Implemented (xer)
-    3 ThreadID           n/a
-    4 ServiceName        Implemented (from Req)
-    6 TargetEntity       Implemented (tgt_entity)
-    7 TargetServiceName Implemented (tgt_path)/
-    8 ErrorCategory      Implemented (severity)
-    9. ErrorCode         Implemented (ecode)
-    10 ErrorDescription  Implemented (msg)
-    11. detailMessage    Implemented (adv_msg)
-
-    Not implemented:
-    5 PartnerName - nothing in the request tells me this
-    """
-    ets = utc()
-
-    _ERROR_LOGGER.error(
-        "{ets}|{xer}|n/a|{path}||{tge}|{tgp}|{sev}|{ecode}|{msg}|{amsg}"
-        .format(
-            ets=ets,
-            xer=xer,
-            path=raw_request.path.split("/")[1],
-            tge=tgt_entity,
-            tgp=tgt_path,
-            sev=severity,
-            ecode=ecode,
-            msg=msg,
-            amsg=adv_msg,
-        )
-    )
-
-
-def metrics(raw_request, bts, xer, target, target_path, rcode,
-            calling_mod, msg="n/a"):
-    """
-    write an EELF error record per
-    'https://wiki.onap.org/download/attachments/1015849/
-    ONAP%20application%20logging%20guidelines.pdf?api=v2'
-
-    %The metrics fields implemented:
-
-    1 BeginTimestamp        Implemented (bts)
-    2 EndTimestamp          Auto Injected when this is called
-    3 RequestID             Implemented (xer)
-    5 threadId              n/a
-    7 serviceName           Implemented (from Req)
-    9 TargetEntity          Implemented (target)
-    10 TargetServiceName    Implemented (target_path)
-    11 StatusCode           Implemented (based on rcode)
-    12 Response Code        Implemented (rcode)
-    15 Category log level   all metrics records are INFO.
-    17 Server IP address    Implemented (from Req)
-    18 ElapsedTime          Auto Injected (milliseconds)
-    19 Server               This is running in a Docker container so this is
-                            not applicable, my HOSTNAME is always
-                            "config_binding_service"
-    20 ClientIPaddress      Implemented (from Req)
-    21 class name           Implemented (mod), though docs say OOP,
-                            I am using the python  module here
-    22 Unused               ...implemented....
-    24 TargetVirtualEntity  n/a
-    25-28 Custom            n/a
-    29 detailMessage        Implemented (msg)
-
-    Not implemented
-    4 serviceInstanceID - ?
-    6 physical/virtual server name (Optional)
-    8 PartnerName - nothing in the request tells me this
-    13 Response Description - the CBS follows standard HTTP error
-                              codes so look them up
-    14 instanceUUID - Optional
-    16 Severity (Optional)
-    23 ProcessKey - optional
-    """
-    ets = utc()
-
-    _METRICS_LOGGER.info(
-        "{bts}|{ets}|{xer}||n/a||{path}||{tge}|{tgp}|{status}|{rcode}|||INFO||{servip}|"
-        "{et}|config_binding_service|{clientip}|{calling_mod}|||n/a|||||{msg}"
-        .format(
-            bts=bts.isoformat(),
-            ets=ets.isoformat(),
-            xer=xer,
-            path=raw_request.path.split("/")[1],
-            tge=target,
-            tgp=target_path,
-            status="COMPLETE" if rcode < 400 else "ERROR",
-            rcode=rcode,
-            servip=raw_request.host.split(":")[0],
-            et=int((ets - bts).microseconds / 1000),
-            clientip=raw_request.remote_addr,
-            calling_mod=calling_mod,
-            msg=msg,
-        )
-    )
index 8db3c1f..01661ad 100755 (executable)
 #
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=====================================================
-
 import uuid
+from os import getenv
 from threading import Timer
 
 import requests
 from onap_dcae_cbs_docker_client.client import get_all
+from onaplogging.mdcContext import MDC
 from requests.auth import HTTPBasicAuth
 from tenacity import wait_fixed, stop_after_attempt, retry, retry_if_exception_type
 
-import mod.pmsh_logging as logger
+from mod import logger
+
+
+def mdc_handler(function):
+    def decorator(*args, **kwargs):
+        request_id = str(uuid.uuid1())
+        invocation_id = str(uuid.uuid1())
+        MDC.put('ServiceName', getenv('HOSTNAME'))
+        MDC.put('RequestID', request_id)
+        MDC.put('InvocationID', invocation_id)
+
+        kwargs['request_id'] = request_id
+        kwargs['invocation_id'] = invocation_id
+        return function(*args, **kwargs)
+    return decorator
 
 
 class ConfigHandler:
     """ Handles retrieval of PMSH's configuration from Configbinding service."""
     @staticmethod
+    @mdc_handler
     @retry(wait=wait_fixed(2), stop=stop_after_attempt(5), retry=retry_if_exception_type(Exception))
-    def get_pmsh_config():
+    def get_pmsh_config(**kwargs):
         """ Retrieves PMSH's configuration from Config binding service. If a non-2xx response
         is received, it retries after 2 seconds for 5 times before raising an exception.
 
@@ -42,11 +58,12 @@ class ConfigHandler:
             Exception: If any error occurred pulling configuration from Config binding service.
         """
         try:
+            logger.info('Fetching PMSH Configuration from CBS.')
             config = get_all()
-            logger.debug(f'PMSH config from CBS: {config}')
+            logger.info(f'Successfully fetched PMSH config from CBS: {config}')
             return config
         except Exception as err:
-            logger.debug(f'Failed to get config from CBS: {err}')
+            logger.error(f'Failed to get config from CBS: {err}')
             raise Exception
 
 
@@ -129,7 +146,8 @@ class _MrPub(_DmaapMrClient):
         self.pub_name = pub_name
         super().__init__(aaf_creds, **kwargs)
 
-    def publish_to_topic(self, event_json):
+    @mdc_handler
+    def publish_to_topic(self, event_json, **kwargs):
         """
         Publish the event to the DMaaP Message Router topic.
 
@@ -141,7 +159,10 @@ class _MrPub(_DmaapMrClient):
         """
         try:
             session = requests.Session()
-            headers = {'content-type': 'application/json', 'x-transactionId': str(uuid.uuid1())}
+            headers = {'content-type': 'application/json', 'x-transactionid': kwargs['request_id'],
+                       'InvocationID': kwargs['invocation_id'],
+                       'RequestID': kwargs['request_id']
+                       }
             response = session.post(self.topic_url, headers=headers,
                                     auth=HTTPBasicAuth(self.aaf_id, self.aaf_pass), json=event_json,
                                     verify=False)
@@ -171,7 +192,8 @@ class _MrSub(_DmaapMrClient):
         self.sub_name = sub_name
         super().__init__(aaf_creds, **kwargs)
 
-    def get_from_topic(self, consumer_id, consumer_group='dcae_pmsh_cg', timeout=1000):
+    @mdc_handler
+    def get_from_topic(self, consumer_id, consumer_group='dcae_pmsh_cg', timeout=1000, **kwargs):
         """
         Returns the json data from the MrTopic.
 
@@ -187,8 +209,10 @@ class _MrSub(_DmaapMrClient):
         topic_data = None
         try:
             session = requests.Session()
-            headers = {'accept': 'application/json', 'content-type': 'application/json'}
-            logger.debug(f'Request sent to MR topic: {self.topic_url}')
+            headers = {'accept': 'application/json', 'content-type': 'application/json',
+                       'InvocationID': kwargs['invocation_id'],
+                       'RequestID': kwargs['request_id']}
+            logger.debug(f'Fetching messages from MR topic: {self.topic_url}')
             response = session.get(f'{self.topic_url}/{consumer_group}/{consumer_id}'
                                    f'?timeout={timeout}',
                                    auth=HTTPBasicAuth(self.aaf_id, self.aaf_pass), headers=headers,
@@ -197,7 +221,7 @@ class _MrSub(_DmaapMrClient):
             if response.ok:
                 topic_data = response.json()
         except Exception as e:
-            logger.debug(e)
+            logger.error(f'Failed to fetch message from MR: {e}')
         return topic_data
 
 
index aa5a8cb..5ce0369 100644 (file)
@@ -20,11 +20,10 @@ import json
 
 from tenacity import retry, wait_fixed, retry_if_exception_type
 
-import mod.pmsh_logging as logger
+from mod import logger
 from mod.network_function import NetworkFunction
 from mod.subscription import Subscription, AdministrativeState, subscription_nf_states
 
-
 policy_response_handle_functions = {
     AdministrativeState.LOCKED.value: {
         'success': NetworkFunction.delete,
@@ -49,6 +48,7 @@ class PolicyResponseHandler:
         This method polls MR for response from policy. It checks whether the message is for the
         relevant subscription and then handles the response
         """
+        logger.info('Polling MR started for XNF activation/deactivation policy response events.')
         self.app.app_context().push()
         administrative_state = Subscription.get(self.subscription_name).status
         try:
@@ -74,8 +74,8 @@ class PolicyResponseHandler:
             nf_name (str): The network function name
             response_message (str): The message in the response regarding the state (success|failed)
         """
-        logger.debug(f'Response from MR: Sub: {subscription_name} for '
-                     f'NF: {nf_name} received, updating the DB')
+        logger.info(f'Response from MR: Sub: {subscription_name} for '
+                    f'NF: {nf_name} received, updating the DB')
         try:
             sub_nf_status = subscription_nf_states[administrative_state][response_message].value
             policy_response_handle_functions[administrative_state][response_message](
index 7517ba9..be217b1 100755 (executable)
@@ -19,8 +19,7 @@ from enum import Enum
 
 from tenacity import retry, retry_if_exception_type, wait_exponential, stop_after_attempt
 
-import mod.pmsh_logging as logger
-from mod import db
+from mod import db, logger
 from mod.api.db_models import SubscriptionModel, NfSubRelationalModel, NetworkFunctionModel
 from mod.network_function import NetworkFunction
 
@@ -125,6 +124,8 @@ class Subscription:
         except Exception as e:
             logger.debug(f'Failed to add nf {nf.nf_name} to subscription '
                          f'{current_sub.subscription_name}: {e}')
+            logger.debug(f'Subscription {current_sub.subscription_name} now contains these XNFs:'
+                         f'{Subscription.get_nfs_per_subscription(current_sub.subscription_name)}')
 
     @staticmethod
     def get(subscription_name):
@@ -148,6 +149,24 @@ class Subscription:
         """
         return SubscriptionModel.query.all()
 
+    @staticmethod
+    def get_nf_names_per_sub(subscription_name):
+        """ Retrieves a list of network function names related to the subscription
+
+        Args:
+            subscription_name (str): The subscription name
+
+        Returns:
+            list: List of network function names
+        """
+        nf_sub_rel = NfSubRelationalModel.query.filter(
+            NfSubRelationalModel.subscription_name == subscription_name).all()
+        list_of_nfs = []
+        for nf in nf_sub_rel:
+            list_of_nfs.append(nf.nf_name)
+
+        return list_of_nfs
+
     def update_subscription_status(self):
         """ Updates the status of subscription in subscription table """
         try:
@@ -187,6 +206,7 @@ class Subscription:
         self.update_subscription_status()
 
         if self.administrativeState == AdministrativeState.UNLOCKED.value:
+            logger.info(f'{action} subscription initiated for {self.subscriptionName}.')
             action = 'Activate'
             sub_nf_state = SubNfState.PENDING_CREATE.value
 
index 4d4c531..be67cae 100644 (file)
@@ -17,8 +17,8 @@
 # ============LICENSE_END=====================================================
 
 import mod.aai_client as aai
-import mod.pmsh_logging as logger
 from mod.pmsh_utils import ConfigHandler
+from mod import logger
 from mod.subscription import AdministrativeState
 
 
@@ -40,21 +40,22 @@ class SubscriptionHandler:
         self.app.app_context().push()
         config = ConfigHandler.get_pmsh_config()
         new_administrative_state = config['policy']['subscription']['administrativeState']
-
         try:
             if self.administrative_state == new_administrative_state:
-                logger.debug('Administrative State did not change in the Config')
+                logger.info('Administrative State did not change in the Config')
             else:
+                logger.info(f'Administrative State has changed from {self.administrative_state} '
+                            f'to {new_administrative_state}.')
                 self.current_sub, self.current_nfs = aai.get_pmsh_subscription_data(config)
                 self.administrative_state = new_administrative_state
                 self.current_sub.process_subscription(self.current_nfs, self.mr_pub, self.app_conf)
 
                 if new_administrative_state == AdministrativeState.UNLOCKED.value:
-                    logger.debug('Listening to AAI-EVENT topic in MR.')
+                    logger.info('Listening to AAI-EVENT topic in MR.')
                     self.aai_event_thread.start()
                 else:
-                    logger.debug('Stop listening to AAI-EVENT topic in MR.')
+                    logger.info('Stop listening to AAI-EVENT topic in MR.')
                     self.aai_event_thread.cancel()
 
         except Exception as err:
-            logger.debug(f'Error occurred during the activation/deactivation process {err}')
+            logger.error(f'Error occurred during the activation/deactivation process {err}')
index a2ba1fd..f1fb1e0 100755 (executable)
 #
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=====================================================
-
 import sys
 from signal import signal, SIGTERM
 
 import mod.aai_client as aai
-import mod.pmsh_logging as logger
-from mod import db, create_app, launch_api_server
+from mod import db, create_app, launch_api_server, logger
 from mod.aai_event_handler import process_aai_events
 from mod.exit_handler import ExitHandler
 from mod.pmsh_utils import AppConfig, PeriodicTask, ConfigHandler
@@ -35,7 +33,6 @@ def main():
         app = create_app()
         app.app_context().push()
         db.create_all(app=app)
-
         config = ConfigHandler.get_pmsh_config()
         app_conf = AppConfig(**config['config'])
 
@@ -66,7 +63,7 @@ def main():
         launch_api_server(app_conf)
 
     except Exception as e:
-        logger.debug(f'Failed to Init PMSH: {e}')
+        logger.error(f'Failed to initialise PMSH: {e}')
         sys.exit(e)
 
 
index cc09dc8..c8170b7 100644 (file)
@@ -36,5 +36,7 @@ setup(
         "Flask==1.1.1",
         "swagger-ui-bundle==0.0.6",
         "psycopg2-binary==2.8.4",
-        "onap_dcae_cbs_docker_client==2.1.0"]
+        "onap_dcae_cbs_docker_client==2.1.0",
+        "onappylog==1.0.9",
+        "ruamel.yaml==0.16.10"]
 )
diff --git a/components/pm-subscription-handler/tests/log_config.yaml b/components/pm-subscription-handler/tests/log_config.yaml
new file mode 100755 (executable)
index 0000000..5662070
--- /dev/null
@@ -0,0 +1,20 @@
+version: 1
+
+disable_existing_loggers: true
+
+loggers:
+  onap_logger:
+    level: INFO
+    handlers: [stdout_handler]
+    propagate: false
+handlers:
+  stdout_handler:
+    class: logging.StreamHandler
+    formatter: mdcFormatter
+formatters:
+  mdcFormatter:
+    format: '%(asctime)s | %(threadName)s | %(thread)d | %(levelname)s | %(module)s
+      | %(funcName)s | %(mdc)s | %(message)s'
+    mdcfmt: '{ServiceName} | {RequestID} | {InvocationID}'
+    datefmt: '%Y-%m-%dT%H:%M:%S%z'
+    (): onaplogging.mdcformatter.MDCFormatter
index 1ee7155..9a3a1b6 100644 (file)
@@ -25,18 +25,24 @@ import responses
 from requests import Session
 
 import mod.aai_client as aai_client
+from mod import create_app
 
 
 class AaiClientTestCase(TestCase):
 
-    def setUp(self):
+    @patch('mod.update_config')
+    @patch('mod.get_db_connection_url')
+    def setUp(self, mock_get_db_url, mock_update_config):
+        mock_get_db_url.return_value = 'sqlite://'
         self.env = EnvironmentVarGuard()
         self.env.set('AAI_SERVICE_HOST', '1.2.3.4')
         self.env.set('AAI_SERVICE_PORT', '8443')
+        self.env.set('LOGGER_CONFIG', os.path.join(os.path.dirname(__file__), 'log_config.yaml'))
         with open(os.path.join(os.path.dirname(__file__), 'data/cbs_data_1.json'), 'r') as data:
             self.cbs_data = json.load(data)
         with open(os.path.join(os.path.dirname(__file__), 'data/aai_xnfs.json'), 'r') as data:
             self.aai_response_data = data.read()
+        self.app = create_app()
 
     @patch.object(Session, 'put')
     def test_aai_client_get_pm_sub_data_success(self, mock_session):
index 8ef3946..8ca208a 100755 (executable)
@@ -29,9 +29,10 @@ from mod.network_function import NetworkFunction
 
 
 class ControllerTestCase(unittest.TestCase):
+    @patch('mod.update_config')
     @patch('mod.get_db_connection_url')
     @patch.object(Session, 'put')
-    def setUp(self, mock_session, mock_get_db_url):
+    def setUp(self, mock_session, mock_get_db_url, mock_update_config):
         mock_get_db_url.return_value = 'sqlite://'
         with open(os.path.join(os.path.dirname(__file__), 'data/aai_xnfs.json'), 'r') as data:
             self.aai_response_data = data.read()
@@ -40,8 +41,7 @@ class ControllerTestCase(unittest.TestCase):
         self.env = EnvironmentVarGuard()
         self.env.set('AAI_SERVICE_HOST', '1.2.3.4')
         self.env.set('AAI_SERVICE_PORT', '8443')
-        self.env.set('TESTING', 'True')
-        self.env.set('LOGS_PATH', './unit_test_logs')
+        self.env.set('LOGGER_CONFIG', os.path.join(os.path.dirname(__file__), 'log_config.yaml'))
         with open(os.path.join(os.path.dirname(__file__), 'data/cbs_data_1.json'), 'r') as data:
             self.cbs_data_1 = json.load(data)
         self.sub_1, self.xnfs = aai_client.get_pmsh_subscription_data(self.cbs_data_1)
index 138d99a..a5324bd 100755 (executable)
@@ -15,6 +15,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=====================================================
+import os
 from test.support import EnvironmentVarGuard
 from unittest import TestCase
 from unittest.mock import patch
@@ -26,13 +27,14 @@ from mod.subscription import Subscription
 
 class NetworkFunctionTests(TestCase):
 
+    @patch('mod.update_config')
     @patch('mod.get_db_connection_url')
-    def setUp(self, mock_get_db_url):
+    def setUp(self, mock_get_db_url, mock_update_config):
         mock_get_db_url.return_value = 'sqlite://'
         self.nf_1 = NetworkFunction(nf_name='pnf_1', orchestration_status='Inventoried')
         self.nf_2 = NetworkFunction(nf_name='pnf_2', orchestration_status='Active')
         self.env = EnvironmentVarGuard()
-        self.env.set('LOGS_PATH', './unit_test_logs')
+        self.env.set('LOGGER_CONFIG', os.path.join(os.path.dirname(__file__), 'log_config.yaml'))
         self.app = create_app()
         self.app_context = self.app.app_context()
         self.app_context.push()
index 236331b..0c3b2d2 100644 (file)
@@ -31,16 +31,18 @@ from mod.subscription import Subscription
 
 class PmshUtilsTestCase(TestCase):
 
+    @patch('mod.update_config')
     @patch('mod.create_app')
     @patch('mod.get_db_connection_url')
-    def setUp(self, mock_get_db_url, mock_app):
+    def setUp(self, mock_get_db_url, mock_app, mock_update_config):
         mock_get_db_url.return_value = 'sqlite://'
         with open(os.path.join(os.path.dirname(__file__), 'data/cbs_data_1.json'), 'r') as data:
             self.cbs_data = json.load(data)
         self.app_conf = AppConfig(**self.cbs_data['config'])
         self.sub = Subscription(**self.cbs_data['policy']['subscription'])
         self.env = EnvironmentVarGuard()
-        self.env.set('LOGS_PATH', './unit_test_logs')
+        self.env.set('TESTING', 'True')
+        self.env.set('LOGGER_CONFIG', os.path.join(os.path.dirname(__file__), 'log_config.yaml'))
         self.policy_mr_sub = self.app_conf.get_mr_sub('policy_pm_subscriber')
         self.mock_app = mock_app
         self.app = create_app()
index dc549c9..b50d9aa 100755 (executable)
@@ -33,12 +33,14 @@ from mod.subscription import Subscription
 
 
 class SubscriptionTest(TestCase):
+    @patch('mod.update_config')
     @patch('mod.pmsh_utils._MrPub')
     @patch('mod.pmsh_utils._MrSub')
     @patch('mod.get_db_connection_url')
     @patch.object(Session, 'put')
     @patch('pmsh_service_main.AppConfig')
-    def setUp(self, mock_app_config, mock_session, mock_get_db_url, mock_mr_sub, mock_mr_pub):
+    def setUp(self, mock_app_config, mock_session, mock_get_db_url,
+              mock_mr_sub, mock_mr_pub, mock_update_config):
         mock_get_db_url.return_value = 'sqlite://'
         with open(os.path.join(os.path.dirname(__file__), 'data/aai_xnfs.json'), 'r') as data:
             self.aai_response_data = data.read()
@@ -47,8 +49,7 @@ class SubscriptionTest(TestCase):
         self.env = EnvironmentVarGuard()
         self.env.set('AAI_SERVICE_HOST', '1.2.3.4')
         self.env.set('AAI_SERVICE_PORT', '8443')
-        self.env.set('TESTING', 'True')
-        self.env.set('LOGS_PATH', './unit_test_logs')
+        self.env.set('LOGGER_CONFIG', os.path.join(os.path.dirname(__file__), 'log_config.yaml'))
         with open(os.path.join(os.path.dirname(__file__), 'data/cbs_data_1.json'), 'r') as data:
             self.cbs_data_1 = json.load(data)
         with open(os.path.join(os.path.dirname(__file__),
@@ -103,6 +104,13 @@ class SubscriptionTest(TestCase):
 
         self.assertEqual(2, len(subs))
 
+    def test_get_nf_names_per_sub(self):
+        self.sub_1.create()
+        self.sub_1.add_network_function_to_subscription(self.nf_1)
+        self.sub_1.add_network_function_to_subscription(self.nf_2)
+        nfs = Subscription.get_nf_names_per_sub(self.sub_1.subscriptionName)
+        self.assertEqual(2, len(nfs))
+
     def test_create_existing_subscription(self):
         sub1 = self.sub_1.create()
         same_sub1 = self.sub_1.create()
@@ -117,7 +125,6 @@ class SubscriptionTest(TestCase):
         new_nf = NetworkFunction(nf_name='vnf_3', orchestration_status='Inventoried')
         self.sub_1.add_network_function_to_subscription(new_nf)
         nf_subs = Subscription.get_all_nfs_subscription_relations()
-        print(nf_subs)
         self.assertEqual(3, len(nf_subs))
 
     def test_add_duplicate_network_functions_per_subscription(self):
@@ -201,7 +208,6 @@ class SubscriptionTest(TestCase):
             expected_sub_event = json.load(data)
         app_conf = AppConfig(**self.cbs_data_1['config'])
         actual_sub_event = self.sub_1.prepare_subscription_event(self.nf_1.nf_name, app_conf)
-        print(actual_sub_event)
         self.assertEqual(expected_sub_event, actual_sub_event)
 
     def test_get_nf_models(self):
index d922b96..3eb12bc 100644 (file)
@@ -45,7 +45,7 @@ class SubscriptionHandlerTest(TestCase):
         self.nf_2 = NetworkFunction(nf_name='pnf_2')
         self.nfs = [self.nf_1, self.nf_2]
 
-    @patch('mod.pmsh_logging.debug')
+    @patch('mod.logger.info')
     @patch('mod.aai_client.get_pmsh_subscription_data')
     def test_execute_no_change_of_state(self, mock_get_aai, mock_logger):
         mock_get_aai.return_value = self.mock_sub, self.nfs
@@ -88,7 +88,7 @@ class SubscriptionHandlerTest(TestCase):
                                                               self.mock_app_conf)
         self.mock_aai_event_thread.return_value.cancel.assert_called()
 
-    @patch('mod.pmsh_logging.debug')
+    @patch('mod.logger.error')
     @patch('mod.aai_client.get_pmsh_subscription_data')
     def test_execute_exception(self, mock_get_aai, mock_logger):
         mock_get_aai.return_value = self.mock_sub, self.nfs