Merge "[GENERAL] Add Andreas Geissler as committer."
[oom/offline-installer.git] / build / package.py
index ad921ed..3252faa 100755 (executable)
@@ -28,12 +28,14 @@ import glob
 import json
 import sys
 import os
+import hashlib
 
 import tarfile
 import git
 
 log = logging.getLogger(__name__)
-script_location = os.path.dirname(os.path.realpath(__file__))
+script_location = os.path.abspath(os.path.join(__file__, '..'))
+offline_repository_dir = os.path.abspath(os.path.join(script_location, '..'))
 
 
 def prepare_application_repository(directory, url, refspec, patch_path):
@@ -68,28 +70,56 @@ def prepare_application_repository(directory, url, refspec, patch_path):
     return repository
 
 
def create_package_info_file(output_file, repository_list, tag, metadata):
    """
    Generate a text file in json format with basic information about the build.

    :param output_file: path of the package.info file to write
    :param repository_list: list of git repositories to be included in package info
    :param tag: build version of packages
    :param metadata: additional metadata into package.info
    :return:
    """
    log.info('Generating package.info file')
    # 'Packages' starts empty; checksums are filled in after the tars are built.
    build_info = {
        'Build_info': {
            'build_date': datetime.now().strftime('%Y-%m-%d_%H-%M'),
            'Version': tag,
            'Packages': {}
        }
    }

    # Record, per repository, the origin url mapped to the exact commit built.
    for repo in repository_list:
        origin_url = repo.config_reader().get_value('remote "origin"', 'url')
        build_info['Build_info'][origin_url] = repo.head.commit.hexsha

    # metadata may be None (flag not given); treat that as "no extra entries".
    for extra in (metadata or ()):
        build_info['Build_info'].update(extra)

    with open(output_file, 'w') as outfile:
        json.dump(build_info, outfile, indent=4)
 
 
def add_checksum_info(output_dir):
    """
    Add checksum information into package.info file.

    Computes an md5 digest for each generated package tar found in
    output_dir and records it under Build_info -> Packages in the
    package.info file. Missing tars (e.g. skipped packages) are ignored.

    NOTE: md5 is used as an integrity checksum only, not for security.

    :param output_dir: directory where the packages and package.info live
    """
    tar_files = ['resources_package.tar', 'aux_package.tar', 'sw_package.tar']
    info_path = os.path.join(output_dir, 'package.info')

    # Load package.info once; if it does not exist there is nothing to update
    # (best-effort, matching the package-skipping behaviour below).
    try:
        with open(info_path, 'r') as info_file:
            build_info = json.load(info_file)
    except FileNotFoundError:
        return

    for tar_file in tar_files:
        try:
            checksum = hashlib.md5()
            with open(os.path.join(output_dir, tar_file), 'rb') as f:
                # Hash in chunks so large package tars are never fully in memory.
                for chunk in iter(lambda: f.read(65536), b""):
                    checksum.update(chunk)
        except FileNotFoundError:
            # Package generation may have been skipped (--skip-*); ignore.
            continue
        build_info['Build_info']['Packages'][tar_file] = checksum.hexdigest()

    # Write the updated info back exactly once.
    with open(info_path, 'w') as info_file:
        json.dump(build_info, info_file, indent=4)
+
+
 def create_package(tar_content, file_name):
     """
     Creates packages
@@ -100,7 +130,21 @@ def create_package(tar_content, file_name):
     with tarfile.open(file_name, 'w') as output_tar_file:
         for src, dst in tar_content.items():
             if src != '':
-              output_tar_file.add(src, dst)
+                output_tar_file.add(src, dst)
+
+
def metadata_validation(param):
    """
    Validation of metadata parameters.

    :param param: parameter to be checked, needs to be in format key=value;
                  the value part may itself contain '=' characters
    :return: single-entry dict {key: value}
    :raises argparse.ArgumentTypeError: if param is not in key=value format
    """
    # partition() splits on the FIRST '=' only, so values such as urls or
    # base64 strings containing '=' are accepted. An explicit check replaces
    # the previous assert, which would be stripped under `python -O`.
    key, separator, value = param.partition('=')
    if not (separator and key and value):
        msg = "%r is not a valid parameter. Needs to be in format key=value" % param
        raise argparse.ArgumentTypeError(msg)
    return {key: value}
 
 
 def build_offline_deliverables(build_version,
@@ -116,7 +160,8 @@ def build_offline_deliverables(build_version,
                                skip_sw,
                                skip_resources,
                                skip_aux,
-                               overwrite):
+                               overwrite,
+                               metadata):
     """
     Prepares offline deliverables
     :param build_version: Version for packages tagging
@@ -133,17 +178,17 @@ def build_offline_deliverables(build_version,
     :param skip_resources: skip resources package generation
     :param skip_aux: skip aux package generation
     :param overwrite: overwrite files in output directory
+    :param metadata: add metadata info into package.info
     :return:
     """
 
     if os.path.exists(output_dir) and os.listdir(output_dir):
         if not overwrite:
             log.error('Output directory is not empty, use overwrite to force build')
-            raise FileExistsError
+            raise FileExistsError(output_dir)
         shutil.rmtree(output_dir)
 
     # Git
-    offline_repository_dir = os.path.join(script_location, '..')
     offline_repository = git.Repo(offline_repository_dir)
 
     application_dir = os.path.join(output_dir, 'application_repository')
@@ -154,7 +199,7 @@ def build_offline_deliverables(build_version,
 
     # Package info
     info_file = os.path.join(output_dir, 'package.info')
-    create_package_info_file(info_file, [application_repository, offline_repository], build_version)
+    create_package_info_file(info_file, [application_repository, offline_repository], build_version, metadata)
 
     # packages layout as dictionaries. <file> : <file location under tar archive>
     sw_content = {
@@ -180,7 +225,7 @@ def build_offline_deliverables(build_version,
             os.path.join(offline_repository_dir, 'ansible', 'docker', 'build_ansible_image.sh'))
         installer_build.check_returncode()
         os.chdir(script_location)
-        sw_package_tar_path = os.path.join(output_dir, 'sw_package' + build_version + '.tar')
+        sw_package_tar_path = os.path.join(output_dir, 'sw_package.tar')
         create_package(sw_content, sw_package_tar_path)
 
     if not skip_resources:
@@ -194,30 +239,35 @@ def build_offline_deliverables(build_version,
         log.info('Binaries - workaround')
         download_dir_path = os.path.join(resources_directory, 'downloads')
         os.chdir(download_dir_path)
-        for file in os.listdir():
+        for file in os.listdir(download_dir_path):
             if os.path.islink(file):
                 os.unlink(file)
 
-        rke_files = glob.glob(os.path.join('.', '**/rke_linux-amd64'), recursive=True)
-        os.symlink(rke_files[0], os.path.join(download_dir_path, rke_files[0].split('/')[-1]))
-
-        helm_tar_files = glob.glob(os.path.join('.', '**/helm-*-linux-amd64.tar.gz'), recursive=True)
-        os.symlink(helm_tar_files[0], os.path.join(download_dir_path, helm_tar_files[0].split('/')[-1]))
+        bin_pattern_list = ['**/rke_linux-amd64',
+                            '**/helm-*-linux-amd64.tar.gz',
+                            '**/kubectl',
+                            '**/helm-push_*_linux_amd64.tar.gz',
+                            '**/kube-prometheus-stack-*.tgz',
+                            '**/cert-manager-*.tgz',
+                            '**/cmctl-linux-amd64.tar.gz',
+                            '**/strimzi-kafka-operator-helm-3-chart-*.tgz']
 
-        kubectl_files = glob.glob(os.path.join('.', '**/kubectl'), recursive=True)
-        os.symlink(kubectl_files[0], os.path.join(download_dir_path, kubectl_files[0].split('/')[-1]))
+        for pattern in bin_pattern_list:
+            for bin_file in glob.glob(os.path.join('.', pattern), recursive=True):
+                os.symlink(bin_file, os.path.join(download_dir_path, bin_file.split('/')[-1]))
 
         os.chdir(script_location)
         # End of workaround
 
-        resources_package_tar_path = os.path.join(output_dir, 'resources_package' + build_version + '.tar')
+        resources_package_tar_path = os.path.join(output_dir, 'resources_package.tar')
         create_package(resources_content, resources_package_tar_path)
 
     if not skip_aux:
-        aux_package_tar_path = os.path.join(output_dir, 'aux_package'+ build_version + '.tar')
+        aux_package_tar_path = os.path.join(output_dir, 'aux_package.tar')
         create_package(aux_content, aux_package_tar_path)
 
-    shutil.rmtree(application_dir)
+    add_checksum_info(output_dir)
+    shutil.rmtree(application_dir, ignore_errors=True)
 
 
 def run_cli():
@@ -226,7 +276,7 @@ def run_cli():
     """
     parser = argparse.ArgumentParser(description='Create Package For Offline Installer')
     parser.add_argument('--build-version',
-                        help='version of the build', default='custom')
+                        help='version of the build', default='')
     parser.add_argument('application_repository_url', metavar='application-repository-url',
                         help='git repository hosting application helm charts')
     parser.add_argument('--application-repository_reference', default='master',
@@ -234,16 +284,17 @@ def run_cli():
     parser.add_argument('--application-patch_file',
                         help='git patch file to be applied over application repository', default='')
     parser.add_argument('--application-charts_dir',
-                        help='path to directory under application repository containing helm charts ', default='kubernetes')
+                        help='path to directory under application repository containing helm charts ',
+                        default='kubernetes')
     parser.add_argument('--application-configuration',
                         help='path to application configuration file (helm override configuration)',
-                        default='')
+                        default=os.path.join(offline_repository_dir, 'config/application_configuration.yml'))
     parser.add_argument('--application-patch-role',
                         help='path to application patch role file (ansible role) to be executed right before installation',
                         default='')
-    parser.add_argument('--output-dir', '-o', default=os.path.join(script_location, '..', '..'),
+    parser.add_argument('--output-dir', '-o', default=os.path.join(offline_repository_dir, '../packages'),
                         help='Destination directory for saving packages')
-    parser.add_argument('--resources-directory', default='',
+    parser.add_argument('--resources-directory', default=os.path.join(offline_repository_dir, '../resources'),
                         help='Path to resource directory')
     parser.add_argument('--aux-directory',
                         help='Path to aux binary directory', default='')
@@ -257,6 +308,8 @@ def run_cli():
                         help='overwrite files in output directory')
     parser.add_argument('--debug', action='store_true', default=False,
                         help='Turn on debug output')
+    parser.add_argument('--add-metadata', nargs="+", type=metadata_validation,
+                        help='additional metadata added into package.info, format: key=value')
     args = parser.parse_args()
 
     if args.debug:
@@ -277,7 +330,8 @@ def run_cli():
                                args.skip_sw,
                                args.skip_resources,
                                args.skip_aux,
-                               args.overwrite)
+                               args.overwrite,
+                               args.add_metadata)
 
 
 if __name__ == '__main__':