import json
import sys
import os
+import hashlib
import tarfile
import git
return repository
def create_package_info_file(output_file, repository_list, tag, metadata):
    """
    Write a package.info file (JSON) describing the build.

    :param output_file: path of the json file to create
    :param repository_list: list of repositories to be included in package info
    :param tag: build version of packages
    :param metadata: optional iterable of dicts merged into the Build_info section
    :return:
    """
    log.info('Generating package.info file')
    info = {
        'build_date': datetime.now().strftime('%Y-%m-%d_%H-%M'),
        'Version': tag,
        'Packages': {}
    }
    # Record each repository's origin URL -> checked-out commit sha.
    for repo in repository_list:
        origin_url = repo.config_reader().get_value('remote "origin"', 'url')
        info[origin_url] = repo.head.commit.hexsha
    # Fold any user-supplied key=value metadata into the same section.
    for meta in (metadata or []):
        info.update(meta)

    with open(output_file, 'w') as outfile:
        json.dump({'Build_info': info}, outfile, indent=4)
def add_checksum_info(output_dir):
    """
    Add md5 checksum information for the generated packages into package.info.

    Package tars that were not built (e.g. skipped via --skip-* options) are
    silently ignored; a missing package.info also makes this a silent no-op,
    preserving the best-effort behaviour of the build.

    :param output_dir: directory where the packages and package.info reside
    """
    tar_files = ['resources_package.tar', 'aux_package.tar', 'sw_package.tar']
    checksums = {}
    for tar_file in tar_files:
        try:
            digest = hashlib.md5()  # integrity marker only, not a security hash
            with open(os.path.join(output_dir, tar_file), 'rb') as f:
                # Hash in 4 KiB chunks so large tars are never loaded whole.
                for chunk in iter(lambda: f.read(4096), b""):
                    digest.update(chunk)
            checksums[tar_file] = digest.hexdigest()
        except FileNotFoundError:
            pass  # package was skipped during this build

    info_path = os.path.join(output_dir, 'package.info')
    try:
        # Read/modify/write package.info once, instead of once per package.
        with open(info_path, 'r') as f:
            json_data = json.load(f)
        # setdefault tolerates info files written without a 'Packages' section.
        json_data['Build_info'].setdefault('Packages', {}).update(checksums)
        with open(info_path, 'w') as f:
            json.dump(json_data, f, indent=4)
    except FileNotFoundError:
        pass  # no package.info generated — nothing to annotate
+
def create_package(tar_content, file_name):
"""
Creates packages
output_tar_file.add(src, dst)
def metadata_validation(param):
    """
    argparse type-checker for additional metadata parameters.

    :param param: parameter to be checked, needs to be in format key=value
    :return: single-entry dict {key: value}
    :raises argparse.ArgumentTypeError: when param is not in key=value format
    """
    # partition (not split) so values may themselves contain '=' characters
    # (e.g. base64 payloads); only the first '=' separates key from value.
    # An explicit check replaces assert, which is stripped under `python -O`.
    key, sep, value = param.partition('=')
    if not (sep and key and value):
        msg = "%r is not a valid parameter. Needs to be in format key=value" % param
        raise argparse.ArgumentTypeError(msg)
    return {key: value}
+
def build_offline_deliverables(build_version,
application_repository_url,
application_repository_reference,
skip_sw,
skip_resources,
skip_aux,
- overwrite):
+ overwrite,
+ metadata):
"""
Prepares offline deliverables
:param build_version: Version for packages tagging
:param skip_resources: skip resources package generation
:param skip_aux: skip aux package generation
:param overwrite: overwrite files in output directory
+ :param metadata: add metadata info into package.info
:return:
"""
# Package info
info_file = os.path.join(output_dir, 'package.info')
- create_package_info_file(info_file, [application_repository, offline_repository], build_version)
+ create_package_info_file(info_file, [application_repository, offline_repository], build_version, metadata)
# packages layout as dictionaries. <file> : <file location under tar archive>
sw_content = {
info_file: 'package.info'
}
- # add separator if build version not empty
- build_version = "-" + build_version if build_version != "" else ""
-
if not skip_sw:
log.info('Building offline installer')
os.chdir(os.path.join(offline_repository_dir, 'ansible', 'docker'))
os.path.join(offline_repository_dir, 'ansible', 'docker', 'build_ansible_image.sh'))
installer_build.check_returncode()
os.chdir(script_location)
- sw_package_tar_path = os.path.join(output_dir, 'sw_package' + build_version + '.tar')
+ sw_package_tar_path = os.path.join(output_dir, 'sw_package.tar')
create_package(sw_content, sw_package_tar_path)
if not skip_resources:
if os.path.islink(file):
os.unlink(file)
- rke_files = glob.glob(os.path.join('.', '**/rke_linux-amd64'), recursive=True)
- os.symlink(rke_files[0], os.path.join(download_dir_path, rke_files[0].split('/')[-1]))
-
- helm_tar_files = glob.glob(os.path.join('.', '**/helm-*-linux-amd64.tar.gz'), recursive=True)
- os.symlink(helm_tar_files[0], os.path.join(download_dir_path, helm_tar_files[0].split('/')[-1]))
+ bin_pattern_list = ['**/rke_linux-amd64',
+ '**/helm-*-linux-amd64.tar.gz',
+ '**/kubectl',
+ '**/helm-push_*_linux_amd64.tar.gz',
+ '**/kube-prometheus-stack-*.tgz',
+ '**/cert-manager-*.tgz',
+ '**/cmctl-linux-amd64.tar.gz',
+ '**/strimzi-kafka-operator-helm-3-chart-*.tgz']
- kubectl_files = glob.glob(os.path.join('.', '**/kubectl'), recursive=True)
- os.symlink(kubectl_files[0], os.path.join(download_dir_path, kubectl_files[0].split('/')[-1]))
+ for pattern in bin_pattern_list:
+ for bin_file in glob.glob(os.path.join('.', pattern), recursive=True):
+ os.symlink(bin_file, os.path.join(download_dir_path, bin_file.split('/')[-1]))
os.chdir(script_location)
# End of workaround
- resources_package_tar_path = os.path.join(output_dir, 'resources_package' + build_version + '.tar')
+ resources_package_tar_path = os.path.join(output_dir, 'resources_package.tar')
create_package(resources_content, resources_package_tar_path)
if not skip_aux:
- aux_package_tar_path = os.path.join(output_dir, 'aux_package' + build_version + '.tar')
+ aux_package_tar_path = os.path.join(output_dir, 'aux_package.tar')
create_package(aux_content, aux_package_tar_path)
- shutil.rmtree(application_dir)
+ add_checksum_info(output_dir)
+ shutil.rmtree(application_dir, ignore_errors=True)
def run_cli():
help='overwrite files in output directory')
parser.add_argument('--debug', action='store_true', default=False,
help='Turn on debug output')
+ parser.add_argument('--add-metadata', nargs="+", type=metadata_validation,
+ help='additional metadata added into package.info, format: key=value')
args = parser.parse_args()
if args.debug:
args.skip_sw,
args.skip_resources,
args.skip_aux,
- args.overwrite)
+ args.overwrite,
+ args.add_metadata)
if __name__ == '__main__':