1 #!/usr/bin/env python2.7
2 # pylint: disable=C0301
from __future__ import absolute_import, unicode_literals, print_function, division

from contextlib import contextmanager
from datetime import datetime
from hashlib import sha256
from json import load, dump as save
from os import environ, stat, chdir, remove as _delete_file
from os.path import dirname, basename, abspath, realpath, expandvars
from subprocess import check_call as run
from sys import argv

from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
19 CONFIG_FILE = './S3Cachefile.json'
20 UPLOAD_TODO_FILE = './S3CacheTodo.json'
21 BYTES_PER_MB = 1024 * 1024
@contextmanager
def timer():
    """Context manager that prints how many whole seconds (UTC wall clock)
    the wrapped block took.

    Yields nothing; on exit prints "\tDone. Took N second(s)."
    """
    start = datetime.utcnow()
    yield
    end = datetime.utcnow()
    elapsed = end - start
    # total_seconds() is a float; truncate to whole seconds for the report.
    print("\tDone. Took", int(elapsed.total_seconds()), "second(s).")
@contextmanager
def todo_file(writeback=True):
    """Context manager yielding the pending-upload dict loaded from
    UPLOAD_TODO_FILE.

    The dict maps cache names to True when they still need uploading.
    If *writeback* is true, the (possibly mutated) dict is saved back to
    UPLOAD_TODO_FILE when the block exits.
    """
    try:
        with open(UPLOAD_TODO_FILE, 'rt') as json_file:
            todo = load(json_file)
    except (IOError, OSError, ValueError):
        # Missing or corrupt todo file: start from an empty dict.
        todo = {}

    yield todo

    if writeback:
        try:
            with open(UPLOAD_TODO_FILE, 'wt') as json_file:
                save(todo, json_file)
        except (OSError, IOError) as save_err:
            # Best-effort persistence: report the failure but don't abort.
            print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err)
51 def _sha256_of_file(filename):
53 with open(filename, 'rb') as input_file:
54 hasher.update(input_file.read())
55 file_hash = hasher.hexdigest()
56 print('sha256({}) = {}'.format(filename, file_hash))
60 def _delete_file_quietly(filename):
62 _delete_file(filename)
63 except (OSError, IOError):
def mark_needs_uploading(cache_name):
    """Persistently flag *cache_name* as needing a (re-)upload."""
    with todo_file() as pending:
        pending[cache_name] = True
def mark_uploaded(cache_name):
    """Clear any pending-upload flag for *cache_name* (no-op if absent)."""
    with todo_file() as pending:
        pending.pop(cache_name, None)
def need_to_upload(cache_name):
    """Return True when *cache_name* was previously flagged for upload.

    Opens the todo file read-only (writeback=False) since nothing is mutated.
    """
    with todo_file(writeback=False) as pending:
        return pending.get(cache_name, False)
def _tarball_size(directory):
    """Return the size of *directory*'s tarball as a human-readable string.

    st_size is in bytes; BYTES_PER_MB is 1024*1024, so the division yields
    mebibytes — the local was previously misnamed ``kib``.
    """
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
87 def _tarball_filename_for(directory):
88 return abspath('./{}.tar.gz'.format(basename(directory)))
def _create_tarball(directory):
    """Create a gzipped tarball of *directory*, written next to this script."""
    print("Creating tarball of {}...".format(directory))
    tarball = _tarball_filename_for(directory)
    parent = dirname(directory)
    # -C parent so the archive contains just the leaf directory, not the full path.
    run(['tar', '-czf', tarball, '-C', parent, basename(directory)])
def _extract_tarball(directory):
    """Unpack *directory*'s tarball into its parent directory."""
    print("Extracting tarball of {}...".format(directory))
    tarball = _tarball_filename_for(directory)
    run(['tar', '-xzf', tarball, '-C', dirname(directory)])
def download(directory):
    """Fetch the cached tarball for this cache entry from S3 and unpack it
    into *directory*'s parent.

    Uses the script-level globals ``cache_name`` and ``key`` set in
    ``__main__``.  On S3 failure, flags the cache for re-upload and exits.
    """
    mark_uploaded(cache_name)  # reset: a fresh download supersedes any pending upload
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        # Cache miss (or access problem): remember to upload a fresh copy.
        mark_needs_uploading(cache_name)
        print("S3 error:", err)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(cache_name))
def upload(directory):
    """Tar up *directory* and push the tarball to S3, then clear the
    pending-upload flag.  Uses the script-level globals ``cache_name``
    and ``key`` set in ``__main__``."""
    _create_tarball(directory)
    tarball = _tarball_filename_for(directory)
    print("Uploading {} tarball to S3... ({})".format(cache_name, _tarball_size(directory)))
    key.set_contents_from_filename(tarball)
    print("{} cache successfully updated.".format(cache_name))
    mark_uploaded(cache_name)
if __name__ == '__main__':
    # Uses environment variables:
    #   AWS_ACCESS_KEY_ID -- AWS Access Key ID
    #   AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
    #   TWBS_S3_BUCKET -- name of the S3 bucket holding the caches
    argv.pop(0)  # drop the script name
    if len(argv) != 2:
        raise SystemExit("USAGE: s3_cache.py <download | upload> <cache name>")
    mode, cache_name = argv

    # Run relative to this script so the ./S3Cachefile.json paths resolve.
    script_dir = dirname(realpath(__file__))
    chdir(script_dir)

    try:
        with open(CONFIG_FILE, 'rt') as config_file:
            config = load(config_file)
    except (IOError, OSError, ValueError) as config_err:
        print(config_err)
        raise SystemExit("Error when trying to load config from JSON file!")

    try:
        cache_info = config[cache_name]
        key_file = expandvars(cache_info["key"])      # file whose hash identifies the cache entry
        fallback_cmd = cache_info["generate"]         # shell command to rebuild the cache from scratch
        directory = expandvars(cache_info["cache"])   # directory being cached
    except (TypeError, KeyError) as load_err:
        print(load_err)
        raise SystemExit("Config for cache named {!r} is missing or malformed!".format(cache_name))

    try:
        try:
            BUCKET_NAME = environ['TWBS_S3_BUCKET']
        except KeyError:
            raise SystemExit("TWBS_S3_BUCKET environment variable not set!")

        conn = S3Connection()
        bucket = conn.lookup(BUCKET_NAME)
        if bucket is None:
            raise SystemExit("Could not access bucket!")

        # The S3 key is the hash of the key file, so a changed key file
        # automatically misses the old cache entry.
        key_file_hash = _sha256_of_file(key_file)

        key = Key(bucket, key_file_hash)
        key.storage_class = 'REDUCED_REDUNDANCY'

        if mode == 'download':
            download(directory)
        elif mode == 'upload':
            if need_to_upload(cache_name):
                upload(directory)
            else:
                print("No need to upload anything.")
        else:
            raise SystemExit("Unrecognized mode {!r}".format(mode))
    except BaseException as exc:
        # Downloads fall back to regenerating the cache locally rather than
        # failing the build; upload errors propagate.
        if mode != 'download':
            raise
        print("Error!:", exc)
        print("Unable to download from cache.")
        print("Running fallback command to generate cache directory {!r}: {}".format(directory, fallback_cmd))
        run(fallback_cmd, shell=True)