node_modules_cache.py 3.41 KB
Newer Older
1
2
3
4
5
#!/usr/bin/env python2.7
from __future__ import absolute_import, unicode_literals, print_function, division

from sys import argv
from os import environ, stat, remove as _delete_file
6
from os.path import isfile, dirname, basename, abspath
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
from hashlib import sha256
from subprocess import check_call as run

from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError


# Touch-file left behind by a failed download so a later `upload` run
# knows the cache must be (re)populated.
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
# NOTE: despite the name, this is one binary megabyte (MiB).
BYTES_PER_MB = 1024 * 1024
# The target S3 bucket must be configured via the environment.
BUCKET_NAME = environ.get('TWBS_S3_BUCKET')
if BUCKET_NAME is None:
    raise SystemExit("TWBS_S3_BUCKET environment variable not set!")


def _sha256_of_file(filename):
    hasher = sha256()
    with open(filename, 'rb') as input_file:
        hasher.update(input_file.read())
27
28
29
    file_hash = hasher.hexdigest()
    print('sha256({}) = {}'.format(filename, file_hash))
    return file_hash
30
31
32
33
34
35
36
37
38


def _delete_file_quietly(filename):
    try:
        _delete_file(filename)
    except (OSError, IOError):
        pass


39
40
def _tarball_size(directory):
    """Return the size of *directory*'s tarball as a human-readable string, e.g. "12 MiB"."""
    size_in_mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(size_in_mib)


44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
def _tarball_filename_for(directory):
    return abspath('./{}.tar.gz'.format(basename(directory)))


def _create_tarball(directory):
    """Gzip *directory* into its tarball, archiving relative to the parent
    so the archive contains just the directory's basename."""
    print("Creating tarball of {}...".format(directory))
    tar_command = [
        'tar', '-czf', _tarball_filename_for(directory),
        '-C', dirname(directory), basename(directory),
    ]
    run(tar_command)


def _extract_tarball(directory):
    """Unpack *directory*'s tarball into its parent directory."""
    print("Extracting tarball of {}...".format(directory))
    tar_command = [
        'tar', '-xzf', _tarball_filename_for(directory),
        '-C', dirname(directory),
    ]
    run(tar_command)


def download(directory):
    """Fetch the cached tarball for *directory* from S3 and unpack it in place.

    On failure (e.g. a cache miss), drops NEED_TO_UPLOAD_MARKER so a later
    `upload` run knows the cache must be (re)populated, then exits with an
    error.  Relies on the module-global S3 `key` set up in ``__main__``.
    """
    name = basename(directory)
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    print("Downloading {} tarball from S3...".format(name))
    try:
        key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        # Leave a breadcrumb so the upload phase repopulates the cache.
        with open(NEED_TO_UPLOAD_MARKER, 'a'):
            pass
        print(err)
        raise SystemExit("Cached {} download failed!".format(name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(directory))


def upload(directory):
    """Tar up *directory*, push the tarball to S3, then clear the upload marker.

    Relies on the module-global S3 `key` set up in ``__main__``.
    """
    _create_tarball(directory)
    tarball = _tarball_filename_for(directory)
    print("Uploading {} tarball to S3... ({})".format(basename(directory), _tarball_size(directory)))
    key.set_contents_from_filename(tarball)
    print("{} cache successfully updated.".format(directory))
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)


80
81
82
83
84
if __name__ == '__main__':
    # AWS credentials are taken from the environment by boto:
    #   AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY
    argv.pop(0)  # drop the script name; what remains are the real arguments
    if len(argv) != 3:
        raise SystemExit("USAGE: node_modules_cache.py <download | upload> <dependencies file> <directory>")
    mode, dependencies_file, directory = argv

    conn = S3Connection()
    bucket = conn.lookup(BUCKET_NAME)
    if bucket is None:
        raise SystemExit("Could not access bucket!")

    # The cache key is the hash of the dependencies file, so any change to
    # the declared dependencies addresses a different cache entry.
    dependencies_file_hash = _sha256_of_file(dependencies_file)
    key = Key(bucket, dependencies_file_hash)
    key.storage_class = 'REDUCED_REDUNDANCY'

    if mode == 'download':
        download(directory)
    elif mode == 'upload':
        # Only re-upload when an earlier download left the marker behind.
        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
            upload(directory)
        else:
            print("No need to upload anything.")
    else:
        raise SystemExit("Unrecognized mode {!r}".format(mode))