Merge "[GENERAL] Add Andreas Geissler as committer."
[oom/offline-installer.git] / build / package.py
index 6e509a3..3252faa 100755 (executable)
@@ -28,12 +28,14 @@ import glob
 import json
 import sys
 import os
+import hashlib
 
 import tarfile
 import git
 
 log = logging.getLogger(__name__)
-script_location = os.path.dirname(os.path.realpath(__file__))
+script_location = os.path.abspath(os.path.join(__file__, '..'))
+offline_repository_dir = os.path.abspath(os.path.join(script_location, '..'))
 
 
 def prepare_application_repository(directory, url, refspec, patch_path):
@@ -68,27 +70,56 @@ def prepare_application_repository(directory, url, refspec, patch_path):
     return repository
 
 
-def create_package_info_file(output_file, repository_list):
+def create_package_info_file(output_file, repository_list, tag, metadata):
     """
     Generates text file in json format containing basic information about the build
     :param output_file:
     :param repository_list: list of repositories to be included in package info
+    :param tag: build version of packages
+    :param metadata: additional metadata to include in package.info
     :return:
     """
     log.info('Generating package.info file')
     build_info = {
         'Build_info': {
-            'build_date': datetime.now().strftime('%Y-%m-%d_%H-%M')
+            'build_date': datetime.now().strftime('%Y-%m-%d_%H-%M'),
+            'Version': tag,
+            'Packages': {}
         }
     }
     for repository in repository_list:
         build_info['Build_info'][
             repository.config_reader().get_value('remote "origin"', 'url')] = repository.head.commit.hexsha
 
+    if metadata:
+        for meta in metadata:
+            build_info['Build_info'].update(meta)
+
     with open(output_file, 'w') as outfile:
         json.dump(build_info, outfile, indent=4)
 
 
+def add_checksum_info(output_dir):
+    """
+    Add checksum information into package.info file
+    :param output_dir: directory where the packages are located
+    """
+    tar_files = ['resources_package.tar', 'aux_package.tar', 'sw_package.tar']
+    for tar_file in tar_files:
+        try:
+            checksum = hashlib.md5()
+            with open(os.path.join(output_dir, tar_file), 'rb') as f:
+                for chunk in iter(lambda: f.read(4096), b""):
+                    checksum.update(chunk)
+            with open(os.path.join(output_dir, 'package.info'), 'r') as f:
+                json_data = json.load(f)
+                json_data['Build_info']['Packages'].update({tar_file: checksum.hexdigest()})
+            with open(os.path.join(output_dir, 'package.info'), 'w') as f:
+                json.dump(json_data, f, indent=4)
+        except FileNotFoundError:
+            pass
+
+
 def create_package(tar_content, file_name):
     """
     Creates packages
@@ -98,39 +129,66 @@ def create_package(tar_content, file_name):
     log.info('Creating package {}'.format(file_name))
     with tarfile.open(file_name, 'w') as output_tar_file:
         for src, dst in tar_content.items():
-            output_tar_file.add(src, dst)
+            if src != '':
+                output_tar_file.add(src, dst)
 
 
-def build_offline_deliverables(application_repository_url,
+def metadata_validation(param):
+    """
+    Validation of metadata parameters
+    :param param: parameter to be checked; must be in the format key=value
+    """
+    try:
+        key, value = param.split('=')
+        assert (key and value)
+        return {key: value}
+    except (ValueError, AssertionError):
+        msg = "%r is not a valid parameter. Needs to be in format key=value" % param
+        raise argparse.ArgumentTypeError(msg)
+
+
+def build_offline_deliverables(build_version,
+                               application_repository_url,
                                application_repository_reference,
                                application_patch_file,
+                               application_charts_dir,
+                               application_configuration,
+                               application_patch_role,
                                output_dir,
                                resources_directory,
+                               aux_directory,
                                skip_sw,
                                skip_resources,
                                skip_aux,
-                               overwrite):
+                               overwrite,
+                               metadata):
     """
     Prepares offline deliverables
+    :param build_version: version used for tagging the packages
     :param application_repository_url: git repository hosting application helm charts
     :param application_repository_reference: git refspec for repository hosting application helm charts
     :param application_patch_file: git patch file to be applied over application repository
+    :param application_charts_dir: path to directory under application repository containing helm charts
+    :param application_configuration: path to application configuration file (helm override configuration)
+    :param application_patch_role: path to application patch role (executed just before helm deploy)
     :param output_dir: Destination directory for saving packages
     :param resources_directory: Path to resource directory
+    :param aux_directory: Path to aux binary directory
     :param skip_sw: skip sw package generation
     :param skip_resources: skip resources package generation
     :param skip_aux: skip aux package generation
     :param overwrite: overwrite files in output directory
+    :param metadata: metadata to add into package.info
     :return:
     """
 
     if os.path.exists(output_dir) and os.listdir(output_dir):
         if not overwrite:
             log.error('Output directory is not empty, use overwrite to force build')
-            raise FileExistsError
+            raise FileExistsError(output_dir)
+        shutil.rmtree(output_dir)
 
     # Git
-    offline_repository_dir = os.path.join(script_location, '..')
     offline_repository = git.Repo(offline_repository_dir)
 
     application_dir = os.path.join(output_dir, 'application_repository')
@@ -141,23 +199,23 @@ def build_offline_deliverables(application_repository_url,
 
     # Package info
     info_file = os.path.join(output_dir, 'package.info')
-    create_package_info_file(info_file, [application_repository, offline_repository])
+    create_package_info_file(info_file, [application_repository, offline_repository], build_version, metadata)
 
     # packages layout as dictionaries. <file> : <file location under tar archive>
     sw_content = {
         os.path.join(offline_repository_dir, 'ansible'): 'ansible',
-        os.path.join(offline_repository_dir, 'config',
-                     'application_configuration.yml'): 'ansible/application/application_configuration.yml',
-        os.path.join(offline_repository_dir, 'patches', 'onap-patch-role'): 'ansible/application/onap-patch-role',
-        os.path.join(application_dir, 'kubernetes'): 'ansible/application/helm_charts',
-        info_file: 'packge.info'
+        application_configuration: 'ansible/application/application_configuration.yml',
+        application_patch_role: 'ansible/application/onap-patch-role',
+        os.path.join(application_dir, application_charts_dir): 'ansible/application/helm_charts',
+        info_file: 'package.info'
     }
     resources_content = {
         resources_directory: '',
-        info_file: 'packge.info'
+        info_file: 'package.info'
     }
     aux_content = {
-        info_file: 'packge.info'
+        aux_directory: '',
+        info_file: 'package.info'
     }
 
     if not skip_sw:
@@ -181,26 +239,26 @@ def build_offline_deliverables(application_repository_url,
         log.info('Binaries - workaround')
         download_dir_path = os.path.join(resources_directory, 'downloads')
         os.chdir(download_dir_path)
-        for file in os.listdir():
+        for file in os.listdir(download_dir_path):
             if os.path.islink(file):
                 os.unlink(file)
 
-        rke_files = glob.glob(os.path.join('.', '**/rke_linux-amd64'), recursive=True)
-        os.symlink(rke_files[0], os.path.join(download_dir_path, rke_files[0].split('/')[-1]))
+        bin_pattern_list = ['**/rke_linux-amd64',
+                            '**/helm-*-linux-amd64.tar.gz',
+                            '**/kubectl',
+                            '**/helm-push_*_linux_amd64.tar.gz',
+                            '**/kube-prometheus-stack-*.tgz',
+                            '**/cert-manager-*.tgz',
+                            '**/cmctl-linux-amd64.tar.gz',
+                            '**/strimzi-kafka-operator-helm-3-chart-*.tgz']
 
-        helm_tar_files = glob.glob(os.path.join('.', '**/helm-*-linux-amd64.tar.gz'), recursive=True)
-        os.symlink(helm_tar_files[0], os.path.join(download_dir_path, helm_tar_files[0].split('/')[-1]))
-
-        kubectl_files = glob.glob(os.path.join('.', '**/kubectl'), recursive=True)
-        os.symlink(kubectl_files[0], os.path.join(download_dir_path, kubectl_files[0].split('/')[-1]))
+        for pattern in bin_pattern_list:
+            for bin_file in glob.glob(os.path.join('.', pattern), recursive=True):
+                os.symlink(bin_file, os.path.join(download_dir_path, bin_file.split('/')[-1]))
 
         os.chdir(script_location)
         # End of workaround
 
-        log.info('Create rhel repo')
-        createrepo = subprocess.run(['createrepo', os.path.join(resources_directory, 'pkg', 'rhel')])
-        createrepo.check_returncode()
-
         resources_package_tar_path = os.path.join(output_dir, 'resources_package.tar')
         create_package(resources_content, resources_package_tar_path)
 
@@ -208,7 +266,8 @@ def build_offline_deliverables(application_repository_url,
         aux_package_tar_path = os.path.join(output_dir, 'aux_package.tar')
         create_package(aux_content, aux_package_tar_path)
 
-    shutil.rmtree(application_dir)
+    add_checksum_info(output_dir)
+    shutil.rmtree(application_dir, ignore_errors=True)
 
 
 def run_cli():
@@ -216,16 +275,29 @@ def run_cli():
     Run as cli tool
     """
     parser = argparse.ArgumentParser(description='Create Package For Offline Installer')
+    parser.add_argument('--build-version',
+                        help='version of the build', default='')
     parser.add_argument('application_repository_url', metavar='application-repository-url',
                         help='git repository hosting application helm charts')
     parser.add_argument('--application-repository_reference', default='master',
                         help='git refspec for repository hosting application helm charts')
     parser.add_argument('--application-patch_file',
                         help='git patch file to be applied over application repository', default='')
-    parser.add_argument('--output-dir', '-o', default=os.path.join(script_location, '..', '..'),
+    parser.add_argument('--application-charts_dir',
+                        help='path to directory under application repository containing helm charts ',
+                        default='kubernetes')
+    parser.add_argument('--application-configuration',
+                        help='path to application configuration file (helm override configuration)',
+                        default=os.path.join(offline_repository_dir, 'config/application_configuration.yml'))
+    parser.add_argument('--application-patch-role',
+                        help='path to application patch role file (ansible role) to be executed right before installation',
+                        default='')
+    parser.add_argument('--output-dir', '-o', default=os.path.join(offline_repository_dir, '../packages'),
                         help='Destination directory for saving packages')
-    parser.add_argument('--resources-directory',
+    parser.add_argument('--resources-directory', default=os.path.join(offline_repository_dir, '../resources'),
                         help='Path to resource directory')
+    parser.add_argument('--aux-directory',
+                        help='Path to aux binary directory', default='')
     parser.add_argument('--skip-sw', action='store_true', default=False,
                         help='Set to skip sw package generation')
     parser.add_argument('--skip-resources', action='store_true', default=False,
@@ -236,6 +308,8 @@ def run_cli():
                         help='overwrite files in output directory')
     parser.add_argument('--debug', action='store_true', default=False,
                         help='Turn on debug output')
+    parser.add_argument('--add-metadata', nargs="+", type=metadata_validation,
+                        help='additional metadata added into package.info, format: key=value')
     args = parser.parse_args()
 
     if args.debug:
@@ -243,17 +317,22 @@ def run_cli():
     else:
         logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
 
-    build_offline_deliverables(args.application_repository_url,
+    build_offline_deliverables(args.build_version,
+                               args.application_repository_url,
                                args.application_repository_reference,
                                args.application_patch_file,
+                               args.application_charts_dir,
+                               args.application_configuration,
+                               args.application_patch_role,
                                args.output_dir,
                                args.resources_directory,
+                               args.aux_directory,
                                args.skip_sw,
                                args.skip_resources,
                                args.skip_aux,
-                               args.overwrite)
+                               args.overwrite,
+                               args.add_metadata)
 
 
 if __name__ == '__main__':
     run_cli()
-