From 15a6ee9424be5eb4a55c0e6ec1289cec90f7018b Mon Sep 17 00:00:00 2001
From: Mateusz Pilat
Date: Fri, 14 Feb 2020 18:37:35 +0100
Subject: [PATCH] Fix packaging script - Memory error in md5 calculations

During package preparation the whole file is read and stashed into RAM;
when the file is large, like the ONAP package, this results in a memory
error.

Signed-off-by: Mateusz Pilat
Issue-ID: OOM-2308
Change-Id: Ic2dbea4018162bd4cfb380f6fb34078062c38a09
(cherry picked from commit f1d9816984b695fc4ae6a011c23c5f274385070e)
---
 build/package.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/build/package.py b/build/package.py
index 7c35cf9b..40b2c3a3 100755
--- a/build/package.py
+++ b/build/package.py
@@ -107,11 +107,13 @@ def add_checksum_info(output_dir):
     tar_files = ['resources_package.tar', 'aux_package.tar', 'sw_package.tar']
     for tar_file in tar_files:
         try:
-            data = os.path.join(output_dir, tar_file)
-            cksum = hashlib.md5(open(data, 'rb').read()).hexdigest()
+            checksum = hashlib.md5()
+            with open(os.path.join(output_dir, tar_file), 'rb') as f:
+                for chunk in iter(lambda: f.read(4096), b""):
+                    checksum.update(chunk)
             with open(os.path.join(output_dir, 'package.info'), 'r') as f:
                 json_data = json.load(f)
-            json_data['Build_info']['Packages'].update({tar_file: cksum})
+            json_data['Build_info']['Packages'].update({tar_file: checksum.hexdigest()})
             with open(os.path.join(output_dir, 'package.info'), 'w') as f:
                 json.dump(json_data, f, indent=4)
         except FileNotFoundError:
-- 
2.16.6
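
Note (appended for illustration, not part of the applied patch): the fix
streams each tarball through hashlib.md5 in fixed-size chunks instead of
calling read() once on the whole file, so memory use stays constant no
matter how large the tarball is. Below is a minimal self-contained sketch
of that pattern; the function name, file name, and 4096-byte chunk size
are illustrative choices, not mandated by the patch.

    import hashlib

    def md5_of_file(path, chunk_size=4096):
        """Compute an MD5 hex digest without loading the file into RAM."""
        checksum = hashlib.md5()
        with open(path, 'rb') as f:
            # iter() with a sentinel keeps calling f.read(chunk_size)
            # until it returns b"" at end of file.
            for chunk in iter(lambda: f.read(chunk_size), b""):
                checksum.update(chunk)
        return checksum.hexdigest()

    if __name__ == '__main__':
        # Any large file works the same way; this path is just an example.
        print(md5_of_file('resources_package.tar'))

On Python 3.11+ the standard library offers hashlib.file_digest(f, 'md5'),
which streams in the same fashion; the explicit iter()/read() loop used in
the patch also works on the older interpreters current when it was written.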