#! /usr/bin/env python3
# -*- coding: utf-8 -*-

#   COPYRIGHT NOTICE STARTS HERE

#   Copyright 2019 © Samsung Electronics Co., Ltd.
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.

#   COPYRIGHT NOTICE ENDS HERE

from datetime import datetime
import subprocess
import argparse
import logging
import shutil
import glob
import json
import sys
import os
import hashlib

import tarfile
import git

log = logging.getLogger(__name__)
script_location = os.path.abspath(os.path.join(__file__, '..'))
offline_repository_dir = os.path.abspath(os.path.join(script_location, '..'))
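# Note: script_location resolves to the directory containing this script (build/),
# and offline_repository_dir to the root of the offline-installer checkout.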


def prepare_application_repository(directory, url, refspec, patch_path):
    """
    Downloads the git repository according to refspec and applies a patch if provided
    :param directory: path to the repository
    :param url: url of the repository
    :param refspec: refspec to fetch
    :param patch_path: path to a git patch to be applied over the repository
    :return: repository - git repository object
    """

    try:
        shutil.rmtree(directory)
    except FileNotFoundError:
        pass

    log.info('Cloning {} with refspec {}'.format(url, refspec))
    repository = git.Repo.init(directory)
    origin = repository.create_remote('origin', url)
    origin.pull(refspec)
    repository.git.submodule('update', '--init')

    if patch_path:
        log.info('Applying {} over {} {}'.format(patch_path,
                                                 url,
                                                 refspec))
        repository.git.apply(patch_path)
    else:
        log.info('No patch file provided, skipping patching')

    return repository

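# Illustrative usage (directory, URL and refspec are placeholders):
#   repo = prepare_application_repository('/tmp/application_repository',
#                                          'https://gerrit.onap.org/r/oom',
#                                          'master', '')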

def create_package_info_file(output_file, repository_list, tag, metadata):
    """
    Generates a text file in json format containing basic information about the build
    :param output_file: path to the generated package.info file
    :param repository_list: list of repositories to be included in package info
    :param tag: build version of packages
    :param metadata: additional metadata to be included in package.info
    :return:
    """
    log.info('Generating package.info file')
    build_info = {
        'Build_info': {
            'build_date': datetime.now().strftime('%Y-%m-%d_%H-%M'),
            'Version': tag,
            'Packages': {}
        }
    }
    for repository in repository_list:
        build_info['Build_info'][
            repository.config_reader().get_value('remote "origin"', 'url')] = repository.head.commit.hexsha

    if metadata:
        for meta in metadata:
            build_info['Build_info'].update(meta)

    with open(output_file, 'w') as outfile:
        json.dump(build_info, outfile, indent=4)

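# A generated package.info looks roughly as follows (repository URLs, hashes and
# checksums are illustrative; the 'Packages' entries are filled in later by
# add_checksum_info):
#   {
#       "Build_info": {
#           "build_date": "2019-01-01_12-00",
#           "Version": "1.0",
#           "Packages": {"sw_package.tar": "<md5 checksum>"},
#           "<application repository url>": "<commit sha>",
#           "<offline-installer repository url>": "<commit sha>"
#       }
#   }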

def add_checksum_info(output_dir):
    """
    Adds checksum information about the generated packages into the package.info file
    :param output_dir: directory containing the packages
    """
    tar_files = ['resources_package.tar', 'aux_package.tar', 'sw_package.tar']
    for tar_file in tar_files:
        try:
            checksum = hashlib.md5()
            with open(os.path.join(output_dir, tar_file), 'rb') as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    checksum.update(chunk)
            with open(os.path.join(output_dir, 'package.info'), 'r') as f:
                json_data = json.load(f)
                json_data['Build_info']['Packages'].update({tar_file: checksum.hexdigest()})
            with open(os.path.join(output_dir, 'package.info'), 'w') as f:
                json.dump(json_data, f, indent=4)
        except FileNotFoundError:
            pass

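# The recorded MD5 sums can later be used to verify package integrity after
# transfer, e.g. by comparing 'md5sum sw_package.tar' against Build_info.Packages.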

def create_package(tar_content, file_name):
    """
    Creates a tar package
    :param tar_content: dictionary mapping source files to their destination paths inside the tar archive
    :param file_name: output tar file
    """
    log.info('Creating package {}'.format(file_name))
    with tarfile.open(file_name, 'w') as output_tar_file:
        for src, dst in tar_content.items():
            if src != '':
                output_tar_file.add(src, dst)

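# Illustrative call (paths are placeholders); entries with an empty source string
# are skipped:
#   create_package({'/path/to/ansible': 'ansible',
#                   '/path/to/package.info': 'package.info'},
#                  '/tmp/sw_package.tar')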

def metadata_validation(param):
    """
    Validates a metadata parameter
    :param param: parameter to be checked; must be in key=value format
    """
    try:
        key, value = param.split('=')
        assert (key and value)
        return {key: value}
    except (ValueError, AssertionError):
        msg = "%r is not a valid parameter. Needs to be in format key=value" % param
        raise argparse.ArgumentTypeError(msg)

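# Example: metadata_validation('release=jakarta') returns {'release': 'jakarta'},
# while a value without '=' raises argparse.ArgumentTypeError.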

def build_offline_deliverables(build_version,
                               application_repository_url,
                               application_repository_reference,
                               application_patch_file,
                               application_charts_dir,
                               application_configuration,
                               application_patch_role,
                               output_dir,
                               resources_directory,
                               aux_directory,
                               skip_sw,
                               skip_resources,
                               skip_aux,
                               overwrite,
                               metadata):
    """
    Prepares offline deliverables
    :param build_version: Version for packages tagging
    :param application_repository_url: git repository hosting application helm charts
    :param application_repository_reference: git refspec for repository hosting application helm charts
    :param application_patch_file: git patch file to be applied over application repository
    :param application_charts_dir: path to directory under application repository containing helm charts
    :param application_configuration: path to application configuration file (helm override configuration)
    :param application_patch_role: path to application patch role (executed just before helm deploy)
    :param output_dir: Destination directory for saving packages
    :param resources_directory: Path to resource directory
    :param aux_directory: Path to aux binary directory
    :param skip_sw: skip sw package generation
    :param skip_resources: skip resources package generation
    :param skip_aux: skip aux package generation
    :param overwrite: overwrite files in output directory
    :param metadata: add metadata info into package.info
    :return:
    """

    if os.path.exists(output_dir) and os.listdir(output_dir):
        if not overwrite:
            log.error('Output directory is not empty, use overwrite to force build')
            raise FileExistsError(output_dir)
        shutil.rmtree(output_dir)

    # Git
    offline_repository = git.Repo(offline_repository_dir)

    application_dir = os.path.join(output_dir, 'application_repository')
    application_repository = prepare_application_repository(application_dir,
                                                            application_repository_url,
                                                            application_repository_reference,
                                                            application_patch_file)

    # Package info
    info_file = os.path.join(output_dir, 'package.info')
    create_package_info_file(info_file, [application_repository, offline_repository], build_version, metadata)

    # packages layout as dictionaries. <file> : <file location under tar archive>
    sw_content = {
        os.path.join(offline_repository_dir, 'ansible'): 'ansible',
        application_configuration: 'ansible/application/application_configuration.yml',
        application_patch_role: 'ansible/application/onap-patch-role',
        os.path.join(application_dir, application_charts_dir): 'ansible/application/helm_charts',
        info_file: 'package.info'
    }
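    # The resulting sw_package.tar therefore roughly contains:
    #   ansible/
    #   ansible/application/application_configuration.yml
    #   ansible/application/onap-patch-role
    #   ansible/application/helm_charts
    #   package.info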
    resources_content = {
        resources_directory: '',
        info_file: 'package.info'
    }
    aux_content = {
        aux_directory: '',
        info_file: 'package.info'
    }

    if not skip_sw:
        log.info('Building offline installer')
        os.chdir(os.path.join(offline_repository_dir, 'ansible', 'docker'))
        installer_build = subprocess.run(
            os.path.join(offline_repository_dir, 'ansible', 'docker', 'build_ansible_image.sh'))
        installer_build.check_returncode()
        os.chdir(script_location)
        sw_package_tar_path = os.path.join(output_dir, 'sw_package.tar')
        create_package(sw_content, sw_package_tar_path)

    if not skip_resources:
        log.info('Building own dns image')
        dns_build = subprocess.run([
            os.path.join(offline_repository_dir, 'build', 'creating_data', 'create_nginx_image', '01create-image.sh'),
            os.path.join(resources_directory, 'offline_data', 'docker_images_infra')])
        dns_build.check_returncode()

        # Workaround for downloads that are not stored "flat": expose the needed
        # binaries directly under the downloads directory via symlinks
        log.info('Binaries - workaround')
        download_dir_path = os.path.join(resources_directory, 'downloads')
        os.chdir(download_dir_path)
        for file in os.listdir(download_dir_path):
            if os.path.islink(file):
                os.unlink(file)

        bin_pattern_list = ['**/rke_linux-amd64',
                            '**/helm-*-linux-amd64.tar.gz',
                            '**/kubectl',
                            '**/helm-push_*_linux_amd64.tar.gz',
                            '**/kube-prometheus-stack-*.tgz',
                            '**/cert-manager-*.tgz',
                            '**/cmctl-linux-amd64.tar.gz',
                            '**/strimzi-kafka-operator-helm-3-chart-*.tgz']

        for pattern in bin_pattern_list:
            for bin_file in glob.glob(os.path.join('.', pattern), recursive=True):
                os.symlink(bin_file, os.path.join(download_dir_path, bin_file.split('/')[-1]))

        os.chdir(script_location)
        # End of workaround

        resources_package_tar_path = os.path.join(output_dir, 'resources_package.tar')
        create_package(resources_content, resources_package_tar_path)

    if not skip_aux:
        aux_package_tar_path = os.path.join(output_dir, 'aux_package.tar')
        create_package(aux_content, aux_package_tar_path)

    add_checksum_info(output_dir)
    shutil.rmtree(application_dir, ignore_errors=True)


def run_cli():
    """
    Run as a CLI tool
    """
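    # Typical invocation (repository URL and paths are illustrative):
    #   ./package.py https://gerrit.onap.org/r/oom \
    #       --application-repository_reference master \
    #       --output-dir /tmp/packages --resources-directory /tmp/resources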
    parser = argparse.ArgumentParser(description='Create Package For Offline Installer')
    parser.add_argument('--build-version',
                        help='version of the build', default='')
    parser.add_argument('application_repository_url', metavar='application-repository-url',
                        help='git repository hosting application helm charts')
    parser.add_argument('--application-repository_reference', default='master',
                        help='git refspec for repository hosting application helm charts')
    parser.add_argument('--application-patch_file',
                        help='git patch file to be applied over application repository', default='')
    parser.add_argument('--application-charts_dir',
                        help='path to directory under application repository containing helm charts',
                        default='kubernetes')
    parser.add_argument('--application-configuration',
                        help='path to application configuration file (helm override configuration)',
                        default=os.path.join(offline_repository_dir, 'config/application_configuration.yml'))
    parser.add_argument('--application-patch-role',
                        help='path to application patch role file (ansible role) to be executed right before installation',
                        default='')
    parser.add_argument('--output-dir', '-o', default=os.path.join(offline_repository_dir, '../packages'),
                        help='Destination directory for saving packages')
    parser.add_argument('--resources-directory', default=os.path.join(offline_repository_dir, '../resources'),
                        help='Path to resource directory')
    parser.add_argument('--aux-directory',
                        help='Path to aux binary directory', default='')
    parser.add_argument('--skip-sw', action='store_true', default=False,
                        help='Set to skip sw package generation')
    parser.add_argument('--skip-resources', action='store_true', default=False,
                        help='Set to skip resources package generation')
    parser.add_argument('--skip-aux', action='store_true', default=False,
                        help='Set to skip aux package generation')
    parser.add_argument('--overwrite', action='store_true', default=False,
                        help='overwrite files in output directory')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Turn on debug output')
    parser.add_argument('--add-metadata', nargs="+", type=metadata_validation,
                        help='additional metadata added into package.info, format: key=value')
    args = parser.parse_args()

    if args.debug:
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    else:
        logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')

    build_offline_deliverables(args.build_version,
                               args.application_repository_url,
                               args.application_repository_reference,
                               args.application_patch_file,
                               args.application_charts_dir,
                               args.application_configuration,
                               args.application_patch_role,
                               args.output_dir,
                               args.resources_directory,
                               args.aux_directory,
                               args.skip_sw,
                               args.skip_resources,
                               args.skip_aux,
                               args.overwrite,
                               args.add_metadata)


if __name__ == '__main__':
    run_cli()