Merge "[VVP] Support pluggable data sources for preload data"
diff --git a/ice_validator/tests/conftest.py b/ice_validator/tests/conftest.py
index b09a8aa..e0aa864 100644
--- a/ice_validator/tests/conftest.py
+++ b/ice_validator/tests/conftest.py
@@ -43,6 +43,14 @@ import json
 import os
 import re
 import time
+
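+# PLUGIN_MGR exposes the pluggable preload sources and generators;
+# create_preloads runs them once the test session has finished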
+from preload.engine import PLUGIN_MGR, create_preloads
+from tests.helpers import get_output_dir
+
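+# html.escape is the Python 3 API; cgi.escape is the legacy Python 2 fallback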
+try:
+    from html import escape
+except ImportError:
+    from cgi import escape
 from collections import defaultdict
 
 import traceback
@@ -91,18 +99,6 @@ COLLECTION_FAILURES = []
 ALL_RESULTS = []
 
 
-def get_output_dir(config):
-    """
-    Retrieve the output directory for the reports and create it if necessary
-    :param config: pytest configuration
-    :return: output directory as string
-    """
-    output_dir = config.option.output_dir or DEFAULT_OUTPUT_DIR
-    if not os.path.exists(output_dir):
-        os.makedirs(output_dir, exist_ok=True)
-    return output_dir
-
-
 def extract_error_msg(rep):
     """
     If a custom error message was provided, then extract it otherwise
@@ -348,6 +344,16 @@ def pytest_sessionfinish(session, exitstatus):
     )
 
 
+def pytest_terminal_summary(terminalreporter, exitstatus):
+    # Ensures all preload information and warnings appear after
+    # test results
+    try:
+        create_preloads(terminalreporter.config, exitstatus)
+    except Exception:
+        print("Error creating preloads, skipping preload generation")
+        traceback.print_exc()
+
+
 # noinspection PyUnusedLocal
 def pytest_collection_modifyitems(session, config, items):
     """
@@ -357,31 +363,27 @@ def pytest_collection_modifyitems(session, config, items):
     config.traceability_items = list(items)  # save all items for traceability
     if not config.option.self_test:
         for item in items:
-            # checking if test belongs to a category
-            if hasattr(item.function, "categories"):
-                if config.option.test_categories:
-                    test_categories = getattr(item.function, "categories")
-                    passed_categories = config.option.test_categories
-                    if not all(
-                        category in passed_categories for category in test_categories
-                    ):
-                        item.add_marker(
-                            pytest.mark.skip(
-                                reason=(
-                                    "Test categories do not match "
-                                    "all the passed categories"
-                                )
-                            )
+            passed_categories = set(config.option.test_categories or [])
+            all_of_categories = getattr(item.function, "all_categories", set())
+            any_of_categories = getattr(item.function, "any_categories", set())
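+            # a test runs only if every "all" category and, when declared,
+            # at least one "any" category was passed on the command line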
+            if all_of_categories and not all_of_categories.issubset(passed_categories):
+                item.add_marker(
+                    pytest.mark.skip(
+                        reason=(
+                            "Test categories do not match " "all the passed categories"
                         )
-                else:
-                    item.add_marker(
-                        pytest.mark.skip(
-                            reason=(
-                                "Test belongs to a category but "
-                                "no categories were passed"
-                            )
+                    )
+                )
+            if any_of_categories and not passed_categories.intersection(
+                any_of_categories
+            ):
+                item.add_marker(
+                    pytest.mark.skip(
+                        reason=(
+                            "Test categories do not match " "any the passed categories"
                         )
                     )
+                )
 
     items.sort(
         key=lambda x: (0, x.name)
@@ -745,8 +747,10 @@ def generate_html_report(outpath, categories, template_path, failures):
             {
                 "file_links": make_href(failure.files, template_path),
                 "test_id": failure.test_id,
-                "error_message": failure.error_message.replace("\n", "<br/><br/>"),
-                "raw_output": failure.raw_output,
+                "error_message": escape(failure.error_message).replace(
+                    "\n", "<br/><br/>"
+                ),
+                "raw_output": escape(failure.raw_output),
                 "requirements": docutils.core.publish_parts(
                     writer_name="html", source=failure.requirement_text(reqs)
                 )["body"],
@@ -824,6 +828,33 @@ def pytest_addoption(parser):
         help="optional category of test to execute",
     )
 
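+    # each registered preload generator plugin contributes one output format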
+    parser.addoption(
+        "--preload-format",
+        dest="preload_formats",
+        action="append",
+        help=(
+            "Preload format to create (multiple allowed). If not provided "
+            "then all available formats will be created: {}"
+        ).format(", ".join(g.format_name() for g in PLUGIN_MGR.preload_generators)),
+    )
+
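+    # the source type selects which registered preload source plugin
+    # supplies the preload data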
+    parser.addoption(
+        "--preload-source-type",
+        dest="preload_source_type",
+        action="store",
+        default="envfiles",
+        help=(
+            "Preload source type to create (multiple allowed): {}"
+        ).format(", ".join(s.get_identifier() for s in PLUGIN_MGR.preload_sources)),
+    )
+
+    parser.addoption(
+        "--preload-source",
+        dest="preload_source",
+        action="store",
+        help="File or directory containing the source dat for the preloads",
+    )
+
 
 def pytest_configure(config):
     """
@@ -837,7 +868,8 @@ def pytest_configure(config):
         or config.getoption("self_test")
         or config.getoption("help")
     ):
-        raise Exception('One of "--template-dir" or' ' "--self-test" must be specified')
+        raise Exception('One of "--template-directory" or'
+                        ' "--self-test" must be specified')
 
 
 def pytest_generate_tests(metafunc):
@@ -958,7 +990,7 @@ def hash_directory(path):
     :param path: string directory containing files
     :return: string MD5 hash code (hex)
     """
-    md5 = hashlib.md5()
+    md5 = hashlib.md5()  # nosec: fingerprints directory contents, not security
     for dir_path, sub_dirs, filenames in os.walk(path):
         for filename in filenames:
             file_path = os.path.join(dir_path, filename)
@@ -1040,12 +1072,11 @@ def generate_rst_table(output_dir, data):
     rst_path = os.path.join(output_dir, "rst.csv")
     with open(rst_path, "w", newline="") as f:
         out = csv.writer(f)
-        out.writerow(("Requirement ID", "Requirement", "Test Module", "Test Name"))
+        out.writerow(("Requirement ID", "Test Module", "Test Name"))
         for req_id, metadata in data.items():
             out.writerow(
                 (
                     metadata["full_title"],
-                    metadata["description"],
                     metadata["test_case"],
                     metadata["validated_by"],
                 )