diff --git a/.travis.yml b/.travis.yml
index b9c051250b1cb96ea6ea8fba2df8e5482788826f..471c864ca62133e7ca3b4fd10f590772e84ef4db 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,11 +8,12 @@ before_install:
   - if [ "$TWBS_TEST" = validate-html ]; then echo "ruby=$(basename $GEMDIR) jekyll=$JEKYLL_VERSION" > pseudo_Gemfile.lock; fi
 install:
   - time npm install -g grunt-cli
-  - ./test-infra/s3_cache.py download 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules || time ./test-infra/uncached-npm-install.sh
-  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py download rubygems pseudo_Gemfile.lock $GEMDIR || time gem install -N jekyll -v $JEKYLL_VERSION; fi
+  - ./test-infra/s3_cache.py download npm-modules
+  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py download rubygems; fi
 after_script:
-  - if [ "$TWBS_TEST" = core ]; then ./test-infra/s3_cache.py upload 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules; fi
-  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py upload rubygems pseudo_Gemfile.lock $GEMDIR; fi
+  - if [ "$TWBS_TEST" = core ]; then ./test-infra/s3_cache.py upload npm-modules; fi
+  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py upload rubygems; fi
 env:
   global:
     - JEKYLL_VERSION: 1.5.0
diff --git a/test-infra/S3Cachefile.json b/test-infra/S3Cachefile.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5c011939574cdbd25fa05e4020472c862dc268c
--- /dev/null
+++ b/test-infra/S3Cachefile.json
@@ -0,0 +1,12 @@
+{
+    "npm-modules": {
+        "key": "./npm-shrinkwrap.canonical.json",
+        "cache": "../node_modules",
+        "generate": "./uncached-npm-install.sh"
+    },
+    "rubygems": {
+        "key": "../pseudo_Gemfile.lock",
+        "cache": "$GEMDIR",
+        "generate": "gem install -N jekyll -v $JEKYLL_VERSION"
+    }
+}
diff --git a/test-infra/s3_cache.py b/test-infra/s3_cache.py
index afa623bfbde987db93128a4775289c3dd2b54ec8..8f8ffd11df73f594e1482d691f18ea719b89c7c1 100755
--- a/test-infra/s3_cache.py
+++ b/test-infra/s3_cache.py
@@ -2,10 +2,11 @@
 from __future__ import absolute_import, unicode_literals, print_function, division
 
 from sys import argv
-from os import environ, stat, remove as _delete_file
-from os.path import isfile, dirname, basename, abspath
+from os import environ, stat, chdir, remove as _delete_file
+from os.path import isfile, dirname, basename, abspath, realpath, expandvars
 from hashlib import sha256
 from subprocess import check_call as run
+from json import load
 from contextlib import contextmanager
 from datetime import datetime
 
@@ -14,12 +15,9 @@ from boto.s3.key import Key
 from boto.exception import S3ResponseError
 
 
+CONFIG_FILE = './S3Cachefile.json'
 NEED_TO_UPLOAD_MARKER = '.need-to-upload'
 BYTES_PER_MB = 1024 * 1024
-try:
-    BUCKET_NAME = environ['TWBS_S3_BUCKET']
-except KeyError:
-    raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
 
 
 @contextmanager
@@ -71,24 +69,24 @@ def _extract_tarball(directory):
 def download(directory):
     _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
     try:
-        print("Downloading {} tarball from S3...".format(friendly_name))
+        print("Downloading {} tarball from S3...".format(cache_name))
         with timer():
             key.get_contents_to_filename(_tarball_filename_for(directory))
     except S3ResponseError as err:
         open(NEED_TO_UPLOAD_MARKER, 'a').close()
         print(err)
-        raise SystemExit("Cached {} download failed!".format(friendly_name))
+        raise SystemExit("Cached {} download failed!".format(cache_name))
     print("Downloaded {}.".format(_tarball_size(directory)))
     _extract_tarball(directory)
-    print("{} successfully installed from cache.".format(friendly_name))
+    print("{} successfully installed from cache.".format(cache_name))
 
 
 def upload(directory):
     _create_tarball(directory)
-    print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
+    print("Uploading {} tarball to S3... ({})".format(cache_name, _tarball_size(directory)))
     with timer():
         key.set_contents_from_filename(_tarball_filename_for(directory))
-    print("{} cache successfully updated.".format(friendly_name))
+    print("{} cache successfully updated.".format(cache_name))
     _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
 
 
@@ -97,26 +95,61 @@ if __name__ == '__main__':
     #   AWS_ACCESS_KEY_ID -- AWS Access Key ID
     #   AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
     argv.pop(0)
-    if len(argv) != 4:
-        raise SystemExit("USAGE: s3_cache.py <download | upload> <friendly name> <dependencies file> <directory>")
-    mode, friendly_name, dependencies_file, directory = argv
-
-    conn = S3Connection()
-    bucket = conn.lookup(BUCKET_NAME)
-    if bucket is None:
-        raise SystemExit("Could not access bucket!")
-
-    dependencies_file_hash = _sha256_of_file(dependencies_file)
+    if len(argv) != 2:
+        raise SystemExit("USAGE: s3_cache.py <download | upload> <cache name>")
+    mode, cache_name = argv
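+    # Run from this script's directory so the relative paths in S3Cachefile.json resolve correctly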
+    script_dir = dirname(realpath(__file__))
+    chdir(script_dir)
+    try:
+        with open(CONFIG_FILE, 'rt') as config_file:
+            config = load(config_file)
+    except (IOError, OSError, ValueError) as config_err:
+        print(config_err)
+        raise SystemExit("Error when trying to load config from JSON file!")
 
-    key = Key(bucket, dependencies_file_hash)
-    key.storage_class = 'REDUCED_REDUNDANCY'
+    try:
+        cache_info = config[cache_name]
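+        # expandvars resolves environment variables such as $GEMDIR in the config values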
+        key_file = expandvars(cache_info["key"])
+        fallback_cmd = cache_info["generate"]
+        directory = expandvars(cache_info["cache"])
+    except (TypeError, KeyError) as load_err:
+        print(load_err)
+        raise SystemExit("Config for cache named {!r} is missing or malformed!".format(cache_name))
 
-    if mode == 'download':
-        download(directory)
-    elif mode == 'upload':
-        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
-            upload(directory)
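+    # In download mode, any failure in the S3 steps below falls through to the "generate" fallback command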
+    try:
+        try:
+            BUCKET_NAME = environ['TWBS_S3_BUCKET']
+        except KeyError:
+            raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
+
+        conn = S3Connection()
+        bucket = conn.lookup(BUCKET_NAME)
+        if bucket is None:
+            raise SystemExit("Could not access bucket!")
+
+        key_file_hash = _sha256_of_file(key_file)
+
+        key = Key(bucket, key_file_hash)
+        key.storage_class = 'REDUCED_REDUNDANCY'
+
+        if mode == 'download':
+            download(directory)
+        elif mode == 'upload':
+            if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
+                upload(directory)
+            else:
+                print("No need to upload anything.")
         else:
-            print("No need to upload anything.")
-    else:
-        raise SystemExit("Unrecognized mode {!r}".format(mode))
+            raise SystemExit("Unrecognized mode {!r}".format(mode))
+    except BaseException as exc:
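+        # BaseException also covers SystemExit, so a missing TWBS_S3_BUCKET or an S3 error still reaches the fallback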
+        if mode != 'download':
+            raise
+        print("Error!:", exc)
+        print("Unable to download from cache.")
+        print("Running fallback command to generate cache directory {!r}: {}".format(directory, fallback_cmd))
+        with timer():
+            run(fallback_cmd, shell=True)
diff --git a/test-infra/uncached-npm-install.sh b/test-infra/uncached-npm-install.sh
index 49c75192baeda0448b5e0bc4dc56940b98ab2b02..1950ff967fe9a43d83db1bb3615e469d112ab71f 100755
--- a/test-infra/uncached-npm-install.sh
+++ b/test-infra/uncached-npm-install.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
 set -e
+cd ..  # up to the repo root (bootstrap/)
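+# Install against the canonical shrinkwrap so the resulting node_modules matches the cache key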
 cp test-infra/npm-shrinkwrap.canonical.json npm-shrinkwrap.json
 npm install
 rm npm-shrinkwrap.json