summaryrefslogtreecommitdiffstats
path: root/testing/docker/funsize-update-generator
diff options
context:
space:
mode:
authorMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
committerMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
commit5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree10027f336435511475e392454359edea8e25895d /testing/docker/funsize-update-generator
parent49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
downloadUXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.gz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.lz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.xz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.zip
Add m-esr52 at 52.6.0
Diffstat (limited to 'testing/docker/funsize-update-generator')
-rw-r--r--testing/docker/funsize-update-generator/Dockerfile35
-rw-r--r--testing/docker/funsize-update-generator/Makefile17
-rw-r--r--testing/docker/funsize-update-generator/dep.pubkey9
-rw-r--r--testing/docker/funsize-update-generator/nightly.pubkey9
-rw-r--r--testing/docker/funsize-update-generator/release.pubkey9
-rw-r--r--testing/docker/funsize-update-generator/requirements.txt2
-rw-r--r--testing/docker/funsize-update-generator/runme.sh25
-rwxr-xr-xtesting/docker/funsize-update-generator/scripts/funsize.py275
-rwxr-xr-xtesting/docker/funsize-update-generator/scripts/mbsdiff_hook.sh135
9 files changed, 516 insertions, 0 deletions
diff --git a/testing/docker/funsize-update-generator/Dockerfile b/testing/docker/funsize-update-generator/Dockerfile
new file mode 100644
index 000000000..afa8290b1
--- /dev/null
+++ b/testing/docker/funsize-update-generator/Dockerfile
@@ -0,0 +1,35 @@
+FROM ubuntu:vivid
+MAINTAINER Rail Aliiev <rail@mozilla.com>
+
+# Required software
+ENV DEBIAN_FRONTEND noninteractive
+# Ubuntu Vivid has been moved to the old-releases repo
+RUN sed -i -e 's/archive.ubuntu.com/old-releases.ubuntu.com/g' /etc/apt/sources.list
+# Chain apt-get commands with apt-get clean in a single docker RUN
+# to make sure that files are removed within a single docker layer
+RUN apt-get update -q && \
+    apt-get install -yyq --no-install-recommends \
+    python python-setuptools python-cryptography libgetopt-simple-perl \
+    bzip2 clamav clamav-freshclam python-requests python-sh curl && \
+    apt-get clean
+# Unprivileged user the generator runs as (see ENV block at the bottom)
+RUN useradd -d /home/worker -s /bin/bash -m worker
+COPY requirements.txt /tmp/
+# python-pip installs a lot of dependencies increasing the size of an image
+# drastically. Using easy_install saves us almost 200M.
+RUN easy_install pip
+RUN pip install -r /tmp/requirements.txt
+
+# scripts
+RUN mkdir /home/worker/bin
+COPY scripts/* /home/worker/bin/
+COPY runme.sh /runme.sh
+RUN chmod 755 /home/worker/bin/* /runme.sh
+# MAR-signing public keys; funsize.py picks one via --signing-cert
+RUN mkdir /home/worker/keys
+COPY *.pubkey /home/worker/keys/
+# Freshclam may be flaky, retry if it fails
+RUN for i in 1 2 3 4 5; do freshclam --verbose && break || sleep 15; done
+
+# Environment for the worker account; the task entry point is /runme.sh
+ENV HOME /home/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
diff --git a/testing/docker/funsize-update-generator/Makefile b/testing/docker/funsize-update-generator/Makefile
new file mode 100644
index 000000000..ad96cfbf1
--- /dev/null
+++ b/testing/docker/funsize-update-generator/Makefile
@@ -0,0 +1,17 @@
# DOCKERIO_USERNAME must be passed on the command line, e.g.:
#   make build DOCKERIO_USERNAME=myuser
# The recursive (=) assignment is deliberate: $(error ...) fires only
# when the variable is actually expanded, i.e. when a target needs it.
DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
IMAGE_NAME = funsize-update-generator
FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)

# None of these targets create a file with the target's name.
.PHONY: build push pull update_pubkeys

build:
	docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .

push:
	docker push $(FULL_IMAGE_NAME):latest

pull:
	docker pull $(FULL_IMAGE_NAME):latest

# Refresh the MAR-signing public keys from mozilla-central.
# curl -f makes HTTP errors fail the recipe instead of silently writing
# an error page into the .pubkey files; -sS is quiet but still reports errors.
update_pubkeys:
	curl -sSf https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
	curl -sSf https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
	curl -sSf https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
diff --git a/testing/docker/funsize-update-generator/dep.pubkey b/testing/docker/funsize-update-generator/dep.pubkey
new file mode 100644
index 000000000..a1213a57e
--- /dev/null
+++ b/testing/docker/funsize-update-generator/dep.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzjHSobdeiQ3JHP/cCIOp
+WaX9y12rL5mIo9OR9bpqEZdD0yXJJJeZA887Mv8slqsM+qObMUpKvfEE6zyYPIZJ
+ANib31neI5BBYHhfhf2f5EnkilSYlmU3Gx+uRsmsdt58PpYe124tOAGgca/8bUy3
+eb6kUUTwvMI0oWQuPkGUaoHVQyj/bBMTrIkyF3UbfFtiX/SfOPvIoabNUe+pQHUe
+pqC2+RxzDGj+shTq/hYhtXlptFzsEEb2+0foLy0MY8C30dP2QqbM2iavvr/P8OcS
+Gm3H0TQcRzIEBzvPcIjiZi1nQj/r/3TlYRNCjuYT/HsNLXrB/U5Tc990jjAUJxdH
+0wIDAQAB
+-----END PUBLIC KEY-----
diff --git a/testing/docker/funsize-update-generator/nightly.pubkey b/testing/docker/funsize-update-generator/nightly.pubkey
new file mode 100644
index 000000000..93c0904d5
--- /dev/null
+++ b/testing/docker/funsize-update-generator/nightly.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4j/IS3gWbyVVnWn4ZRnC
+Fuzb6VAaHa0I+4E504ekhVAhbKlSfBstkLbXajdjUVAJpn02zWnOaTl5KAdpDpIp
+SkdA4mK20ej3/Ij7gIt8IwaX+ArXL8mP84pxDn5BgaNADm3206Z6YQzc/TDYu529
+qkDFmLqNUVRJAhPO+qqhKHIcVGh8HUHXN6XV1qOFip+UU0M474jAGgurVmAv8Rh7
+VvM0v5KmB6V6WHwM5gwjg2yRY/o+xYIsNeSes9rpp+MOs/RnUA6LI4WZGY4YahvX
+VclIXBDgbWPYtojexIJkmYj8JIIRsh3eCsrRRe14fq7cBurp3CxBYMlDHf0RUoaq
+hQIDAQAB
+-----END PUBLIC KEY-----
diff --git a/testing/docker/funsize-update-generator/release.pubkey b/testing/docker/funsize-update-generator/release.pubkey
new file mode 100644
index 000000000..20df95946
--- /dev/null
+++ b/testing/docker/funsize-update-generator/release.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvH4r94FpQ0gvr1hhTfV9
+NUeWPJ5CN6TZRq7v/Dc4nkJ1J4IP1B3UEii34tcNKpy1nKupiZuTT6T1zQYT+z5x
+3UkDF9qQboQ8RNb/BEz/cN3on/LTEnZ7YSraRL11M6cEB8mvmJxddCEquwqccRbs
+Usp8WUB7uRv1w6Anley7N9F/LE1iLPwJasZypRnzWb3aYsJy0cMFOYy+OXVdpktn
+qYqlNIjnt84u4Nil6UXnBbIJNUVOCY8wOFClNvVpubjPkWK1gtdWy3x/hJU5RpAO
+K9cnHxq4M/I4SUWTWO3r7yweQiHG4Jyoc7sP1jkwjBkSG93sDEycfwOdOoZft3wN
+sQIDAQAB
+-----END PUBLIC KEY-----
diff --git a/testing/docker/funsize-update-generator/requirements.txt b/testing/docker/funsize-update-generator/requirements.txt
new file mode 100644
index 000000000..58a2d60b7
--- /dev/null
+++ b/testing/docker/funsize-update-generator/requirements.txt
@@ -0,0 +1,2 @@
+mar==1.2
+redo
diff --git a/testing/docker/funsize-update-generator/runme.sh b/testing/docker/funsize-update-generator/runme.sh
new file mode 100644
index 000000000..92094a76e
--- /dev/null
+++ b/testing/docker/funsize-update-generator/runme.sh
@@ -0,0 +1,25 @@
#!/bin/sh
# Entry point for the funsize-update-generator image.
# Fetches the task definition for $TASK_ID from taskcluster and runs
# funsize.py to generate partial MAR updates into $ARTIFACTS_DIR.
#
# Required environment:
#   TASK_ID      - taskcluster task whose definition is downloaded
#   SIGNING_CERT - basename of the .pubkey file under /home/worker/keys
# Optional:
#   FILENAME_TEMPLATE - forwarded as --filename-template
#   EXTRA_PARAMS      - extra arguments appended to the funsize.py call

set -xe

# Fail early (via set -e) when the required variables are unset/empty.
# Quoted -n tests are robust against values containing whitespace,
# unlike the original unquoted `test $VAR`.
test -n "$TASK_ID"
test -n "$SIGNING_CERT"

ARTIFACTS_DIR="/home/worker/artifacts"
mkdir -p "$ARTIFACTS_DIR"

curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
    "https://queue.taskcluster.net/v1/task/$TASK_ID"

# enable local cache for mbsdiff results
export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -c /tmp/fs-cache"

if [ -n "$FILENAME_TEMPLATE" ]; then
    EXTRA_PARAMS="--filename-template $FILENAME_TEMPLATE $EXTRA_PARAMS"
fi

/home/worker/bin/funsize.py \
    --artifacts-dir "$ARTIFACTS_DIR" \
    --task-definition /home/worker/task.json \
    --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \
    $EXTRA_PARAMS
diff --git a/testing/docker/funsize-update-generator/scripts/funsize.py b/testing/docker/funsize-update-generator/scripts/funsize.py
new file mode 100755
index 000000000..fd591817c
--- /dev/null
+++ b/testing/docker/funsize-update-generator/scripts/funsize.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python
+
+import ConfigParser
+import argparse
+import functools
+import hashlib
+import json
+import logging
+import os
+import shutil
+import tempfile
+import requests
+import sh
+
+import redo
+from mardor.marfile import MarFile
+
+log = logging.getLogger(__name__)
+# Download allow-list: MAR URLs must start with one of these prefixes;
+# anything else is rejected by verify_allowed_url() before any download.
+ALLOWED_URL_PREFIXES = [
+    "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
+    "http://download.cdn.mozilla.net/pub/firefox/nightly/",
+    "https://mozilla-nightly-updates.s3.amazonaws.com",
+    "https://queue.taskcluster.net/",
+    "http://ftp.mozilla.org/",
+    "http://download.mozilla.org/",
+    "https://archive.mozilla.org/",
+]
+
+# Default name of a generated partial MAR; the placeholders are filled
+# from the mar_data dict in main() via str.format().
+DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
+    "{locale}-{from_buildid}-{to_buildid}.partial.mar"
+
+
+def verify_signature(mar, signature):
+    """Verify that the MAR file at path `mar` carries a valid v1
+    signature for the public key file `signature`; mardor raises on
+    verification failure."""
+    log.info("Checking %s signature", mar)
+    m = MarFile(mar, signature_versions=[(1, signature)])
+    m.verify_signatures()
+
+
@redo.retriable()
def download(url, dest, mode=None):
    """Download `url` to the local path `dest`, retrying on failure.

    If the server supplies a Content-Length header, the number of bytes
    written is checked against it and an IOError is raised on mismatch
    (which triggers a retry via redo.retriable).  When `mode` is given,
    the file permissions are set with os.chmod after the download.
    """
    log.debug("Downloading %s to %s", url, dest)
    # stream=True avoids buffering the whole (potentially large) MAR in
    # memory before iter_content() is consumed below.
    r = requests.get(url, stream=True)
    r.raise_for_status()

    bytes_downloaded = 0
    with open(dest, 'wb') as fd:
        for chunk in r.iter_content(4096):
            fd.write(chunk)
            bytes_downloaded += len(chunk)

    log.debug('Downloaded %s bytes', bytes_downloaded)
    if 'content-length' in r.headers:
        log.debug('Content-Length: %s bytes', r.headers['content-length'])
        if bytes_downloaded != int(r.headers['content-length']):
            raise IOError('Unexpected number of bytes downloaded')

    if mode:
        log.debug("chmod %o %s", mode, dest)
        os.chmod(dest, mode)
+
+
+def unpack(work_env, mar, dest_dir):
+    """Extract the MAR file `mar` into the newly created `dest_dir`
+    using the unwrap_full_update.pl script that WorkEnv downloaded into
+    work_env.workdir."""
+    os.mkdir(dest_dir)
+    unwrap_cmd = sh.Command(os.path.join(work_env.workdir,
+                                         "unwrap_full_update.pl"))
+    log.debug("Unwrapping %s", mar)
+    # _timeout guards against a hung unpack; stderr is folded into
+    # stdout so everything lands in the debug log below.
+    out = unwrap_cmd(mar, _cwd=dest_dir, _env=work_env.env, _timeout=240,
+                     _err_to_out=True)
+    if out:
+        log.debug(out)
+
+
def find_file(directory, filename):
    """Walk `directory` recursively and return the full path of the
    first file named `filename`, or None when there is no match."""
    log.debug("Searching for %s in %s", filename, directory)
    for root, _dirs, names in os.walk(directory):
        if filename not in names:
            continue
        found = os.path.join(root, filename)
        log.debug("Found %s", found)
        return found
+
+
def get_option(directory, filename, section, option):
    """Return the value of [section] option from the first file named
    `filename` found anywhere under `directory`.

    Raises ConfigParser errors if the section/option is missing; fails
    if no matching file exists (find_file returns None).
    """
    # Fixed typo in the original log message ("Exctracting").
    log.debug("Extracting [%s]: %s from %s/**/%s", section, option, directory,
              filename)
    f = find_file(directory, filename)
    config = ConfigParser.ConfigParser()
    config.read(f)
    rv = config.get(section, option)
    log.debug("Found %s", rv)
    return rv
+
+
+def generate_partial(work_env, from_dir, to_dir, dest_mar, channel_ids,
+                     version):
+    """Run make_incremental_update.sh to produce the partial MAR
+    `dest_mar` describing the changes from `from_dir` to `to_dir`.
+
+    `channel_ids` and `version` are handed to the script through the
+    MOZ_CHANNEL_ID / MOZ_PRODUCT_VERSION environment variables; the
+    script itself was fetched by WorkEnv.download_buildsystem_bits().
+    """
+    log.debug("Generating partial %s", dest_mar)
+    env = work_env.env
+    env["MOZ_PRODUCT_VERSION"] = version
+    env["MOZ_CHANNEL_ID"] = channel_ids
+    make_incremental_update = os.path.join(work_env.workdir,
+                                           "make_incremental_update.sh")
+    # _timeout bounds the run; stderr is merged into stdout for logging.
+    out = sh.bash(make_incremental_update, dest_mar, from_dir, to_dir,
+                  _cwd=work_env.workdir, _env=env, _timeout=900,
+                  _err_to_out=True)
+    if out:
+        log.debug(out)
+
+
def get_hash(path, hash_type="sha512"):
    """Return the hex digest of the file at `path`, computed with
    `hash_type` (any algorithm name accepted by hashlib.new),
    reading the file in 4 KiB chunks."""
    h = hashlib.new(hash_type)
    with open(path, "rb") as f:
        # b'' sentinel: equal to '' on Python 2, but unlike '' it also
        # terminates the loop for binary reads on Python 3, where
        # f.read() returns bytes and would never equal the str ''.
        for chunk in iter(functools.partial(f.read, 4096), b''):
            h.update(chunk)
    return h.hexdigest()
+
+
+class WorkEnv(object):
+    """Scratch directory holding the tools needed to unpack complete
+    MARs and generate partials: unwrap_full_update.pl, the mar/mbsdiff
+    binaries, and (per repo/revision) make_incremental_update.sh."""
+
+    def __init__(self):
+        # Fresh temporary directory; removed again by cleanup().
+        self.workdir = tempfile.mkdtemp()
+
+    def setup(self):
+        # Fetch the unpack script and the mar/mbsdiff binaries.
+        self.download_unwrap()
+        self.download_martools()
+
+    def download_unwrap(self):
+        # unwrap_full_update.pl is not too sensitive to the revision
+        url = "https://hg.mozilla.org/mozilla-central/raw-file/default/" \
+              "tools/update-packaging/unwrap_full_update.pl"
+        download(url, dest=os.path.join(self.workdir, "unwrap_full_update.pl"),
+                 mode=0o755)
+
+    def download_buildsystem_bits(self, repo, revision):
+        # Fetch the partial-generation scripts from the exact repo and
+        # revision the "to" build was produced from.
+        prefix = "{repo}/raw-file/{revision}/tools/update-packaging"
+        prefix = prefix.format(repo=repo, revision=revision)
+        for f in ("make_incremental_update.sh", "common.sh"):
+            url = "{prefix}/{f}".format(prefix=prefix, f=f)
+            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
+
+    def download_martools(self):
+        # TODO: check if the tools have to be branch specific
+        prefix = "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/" \
+                 "latest-mozilla-central/mar-tools/linux64"
+        for f in ("mar", "mbsdiff"):
+            url = "{prefix}/{f}".format(prefix=prefix, f=f)
+            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
+
+    def cleanup(self):
+        # Remove the scratch directory and everything downloaded into it.
+        shutil.rmtree(self.workdir)
+
+    @property
+    def env(self):
+        """Copy of os.environ pointing MAR/MBSDIFF at the downloaded
+        binaries; LC_ALL=C keeps external tool output stable."""
+        my_env = os.environ.copy()
+        my_env['LC_ALL'] = 'C'
+        my_env['MAR'] = os.path.join(self.workdir, "mar")
+        my_env['MBSDIFF'] = os.path.join(self.workdir, "mbsdiff")
+        return my_env
+
+
def verify_allowed_url(mar):
    """Raise ValueError unless `mar` starts with one of the entries of
    ALLOWED_URL_PREFIXES; return None when the URL is acceptable."""
    for prefix in ALLOWED_URL_PREFIXES:
        if mar.startswith(prefix):
            return
    raise ValueError("{mar} is not in allowed URL prefixes: {p}".format(
        mar=mar, p=ALLOWED_URL_PREFIXES
    ))
+
+
+def main():
+    """Command-line entry point.
+
+    Reads the JSON task definition given by --task-definition and, for
+    every entry in task["extra"]["funsize"]["partials"]:
+      * checks both MAR URLs against the allow-list,
+      * downloads, (optionally) signature-checks, hashes, unpacks and
+        AV-scans both complete MARs,
+      * generates a partial MAR between them,
+      * copies the result into --artifacts-dir.
+    Metadata for all generated partials is written to
+    <artifacts-dir>/manifest.json.
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--artifacts-dir", required=True)
+    parser.add_argument("--signing-cert", required=True)
+    parser.add_argument("--task-definition", required=True,
+                        type=argparse.FileType('r'))
+    parser.add_argument("--filename-template",
+                        default=DEFAULT_FILENAME_TEMPLATE)
+    parser.add_argument("--no-freshclam", action="store_true", default=False,
+                        help="Do not refresh ClamAV DB")
+    parser.add_argument("-q", "--quiet", dest="log_level",
+                        action="store_const", const=logging.WARNING,
+                        default=logging.DEBUG)
+    args = parser.parse_args()
+
+    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
+                        level=args.log_level)
+    task = json.load(args.task_definition)
+    # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
+
+    # Refresh the ClamAV DB used by the scan below; a failure here is
+    # tolerated and the scan simply runs against the existing DB.
+    if args.no_freshclam:
+        log.info("Skipping freshclam")
+    else:
+        log.info("Refreshing clamav db...")
+        try:
+            redo.retry(lambda: sh.freshclam("--stdout", "--verbose",
+                                            _timeout=300, _err_to_out=True))
+            log.info("Done.")
+        except sh.ErrorReturnCode:
+            log.warning("Freshclam failed, skipping DB update")
+    manifest = []
+    # One iteration per requested partial; each one gets its own
+    # scratch WorkEnv, which is cleaned up at the end of the loop body.
+    for e in task["extra"]["funsize"]["partials"]:
+        for mar in (e["from_mar"], e["to_mar"]):
+            verify_allowed_url(mar)
+
+        work_env = WorkEnv()
+        # TODO: run setup once
+        work_env.setup()
+        complete_mars = {}
+        # Download, verify, hash, unpack and AV-scan both complete MARs.
+        for mar_type, f in (("from", e["from_mar"]), ("to", e["to_mar"])):
+            dest = os.path.join(work_env.workdir, "{}.mar".format(mar_type))
+            unpack_dir = os.path.join(work_env.workdir, mar_type)
+            download(f, dest)
+            if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
+                verify_signature(dest, args.signing_cert)
+            complete_mars["%s_size" % mar_type] = os.path.getsize(dest)
+            complete_mars["%s_hash" % mar_type] = get_hash(dest)
+            unpack(work_env, dest, unpack_dir)
+            log.info("AV-scanning %s ...", unpack_dir)
+            sh.clamscan("-r", unpack_dir, _timeout=600, _err_to_out=True)
+            log.info("Done.")
+
+        path = os.path.join(work_env.workdir, "to")
+        from_path = os.path.join(work_env.workdir, "from")
+        # Collect metadata from the unpacked builds; these values also
+        # feed the output filename template below.
+        mar_data = {
+            "ACCEPTED_MAR_CHANNEL_IDS": get_option(
+                path, filename="update-settings.ini", section="Settings",
+                option="ACCEPTED_MAR_CHANNEL_IDS"),
+            "version": get_option(path, filename="application.ini",
+                                  section="App", option="Version"),
+            "to_buildid": get_option(path, filename="application.ini",
+                                     section="App", option="BuildID"),
+            "from_buildid": get_option(from_path, filename="application.ini",
+                                       section="App", option="BuildID"),
+            "appName": get_option(from_path, filename="application.ini",
+                                  section="App", option="Name"),
+            # Use Gecko repo and rev from platform.ini, not application.ini
+            "repo": get_option(path, filename="platform.ini", section="Build",
+                               option="SourceRepository"),
+            "revision": get_option(path, filename="platform.ini",
+                                   section="Build", option="SourceStamp"),
+            "from_mar": e["from_mar"],
+            "to_mar": e["to_mar"],
+            "platform": e["platform"],
+            "locale": e["locale"],
+        }
+        # Override ACCEPTED_MAR_CHANNEL_IDS if needed
+        if "ACCEPTED_MAR_CHANNEL_IDS" in os.environ:
+            mar_data["ACCEPTED_MAR_CHANNEL_IDS"] = os.environ["ACCEPTED_MAR_CHANNEL_IDS"]
+        # Optional fields are passed straight through from the task.
+        for field in ("update_number", "previousVersion",
+                      "previousBuildNumber", "toVersion",
+                      "toBuildNumber"):
+            if field in e:
+                mar_data[field] = e[field]
+        mar_data.update(complete_mars)
+        # if branch not set explicitly use repo-name
+        mar_data["branch"] = e.get("branch",
+                                   mar_data["repo"].rstrip("/").split("/")[-1])
+        mar_name = args.filename_template.format(**mar_data)
+        mar_data["mar"] = mar_name
+        dest_mar = os.path.join(work_env.workdir, mar_name)
+        # TODO: download these once
+        work_env.download_buildsystem_bits(repo=mar_data["repo"],
+                                           revision=mar_data["revision"])
+        generate_partial(work_env, from_path, path, dest_mar,
+                         mar_data["ACCEPTED_MAR_CHANNEL_IDS"],
+                         mar_data["version"])
+        mar_data["size"] = os.path.getsize(dest_mar)
+        mar_data["hash"] = get_hash(dest_mar)
+
+        shutil.copy(dest_mar, args.artifacts_dir)
+        work_env.cleanup()
+        manifest.append(mar_data)
+    manifest_file = os.path.join(args.artifacts_dir, "manifest.json")
+    with open(manifest_file, "w") as fp:
+        json.dump(manifest, fp, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/testing/docker/funsize-update-generator/scripts/mbsdiff_hook.sh b/testing/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
new file mode 100755
index 000000000..0b677a5e9
--- /dev/null
+++ b/testing/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
@@ -0,0 +1,135 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool contains functions that are to be used to handle/enable funsize
+# Author: Mihai Tabara
+#
+
+HOOK=
+SERVER_URL=
+LOCAL_CACHE_DIR=
+
# Print the hex sha512 digest of the file given as $1 to stdout.
getsha512(){
    openssl sha512 "${1}" | awk '{print $2}'
}
+
+# Print command-line usage/help for this hook script to stdout.
+print_usage(){
+    echo "$(basename $0) -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
+    echo "Script that saves/retrieves from cache presumptive patches as args"
+    echo ""
+    echo "-A SERVER-URL - host where to send the files"
+    echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
+    echo "-g pre hook - tests whether patch already in cache"
+    echo "-u post hook - upload patch to cache for future use"
+    echo ""
+    echo "PATH-FROM-URL : path on disk for source file"
+    echo "PATH-TO-URL : path on disk for destination file"
+    echo "PATH-PATCH : path on disk for patch between source and destination"
+}
+
# Post hook (-u): store a freshly generated patch in the cache.
#   $1 - path of the source file, $2 - path of the destination file,
#   $3 - path of the patch between them.
# The patch is cached as $LOCAL_CACHE_DIR/<sha512($1)>/<sha512($2)>.
upload_patch(){
    sha_from=`getsha512 "$1"`
    sha_to=`getsha512 "$2"`
    patch_path="$3"

    # save to local cache first
    if [ -n "$LOCAL_CACHE_DIR" ]; then
        # Run mkdir directly instead of building the command line in a
        # string: the original's nested quoting broke for cache paths
        # containing whitespace, and backtick-executing a string splits
        # on words regardless of the quotes embedded in it.
        if mkdir -p "$LOCAL_CACHE_DIR/$sha_from" >&2; then
            cp -avf "$patch_path" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
            echo "$patch_path saved on local cache!"
        fi
    fi
    # The remote cache implementation is not used. The code is for usage
    # reference only.
    return 0

    # send it over to funsize
    cmd="curl -sSw %{http_code} -o /dev/null -X POST $SERVER_URL -F sha_from="$sha_from" -F sha_to="$sha_to" -F patch_file="@$patch_path""
    ret_code=`$cmd`

    if [ $ret_code -eq 200 ]; then
        echo "$patch_path Successful uploaded to funsize!"
        return 0
    fi

    echo "$patch_path Failed to be uploaded to funsize!"
    return 1
}
+
# Pre hook (-g): try to satisfy a patch request from the cache.
#   $1 - path of the source file, $2 - path of the destination file,
#   $3 - path the cached patch should be copied to.
# Returns 0 on a cache hit (patch copied to $3), 1 on a miss, so the
# caller (mbsdiff wrapper) knows whether to compute the patch itself.
get_patch(){
    sha_from=`getsha512 "$1"`
    sha_to=`getsha512 "$2"`
    destination_file="$3"
    tmp_file="$destination_file.tmp"

    # try to retrieve from local cache first
    if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
        cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
        echo "Successful retrieved $destination_file from local cache!"
        return 0
    else
        # Fixed message typo: "locale cache" -> "local cache".
        echo "File is not in the local cache"
        return 1
    fi
    # The remote cache implementation is not used. The code is for usage
    # reference only.

    # if unsuccessful, try to retrieve from funsize
    cmd="curl -LsSGw %{http_code} $SERVER_URL/$sha_from/$sha_to -o $tmp_file"
    ret_code=`$cmd`

    if [ $ret_code -eq 200 ]; then
        mv "$tmp_file" "$destination_file"
        echo "Successful retrieved $destination_file from funsize!"
        return 0
    fi

    rm -f "$tmp_file"
    echo "Failed to retrieve $destination_file from funsize!"
    return 1
}
+
+# Reset getopts state in case this file is sourced more than once.
+OPTIND=1
+
+# Parse options: -g/-u select the hook mode, -A/-c configure the caches.
+while getopts ":A:c:gu" option; do
+    case $option in
+        A)
+            SERVER_URL="$OPTARG"
+            ;;
+        c)
+            LOCAL_CACHE_DIR="$OPTARG"
+            ;;
+        g)
+            HOOK="PRE"
+            ;;
+        u)
+            HOOK="POST"
+            ;;
+        \?)
+            echo "Invalid option: -$OPTARG" >&2
+            print_usage
+            exit 1
+            ;;
+        :)
+            echo "Option -$OPTARG requires an argument." >&2
+            print_usage
+            exit 1
+            ;;
+        *)
+            echo "Unimplemented option: -$OPTARG" >&2
+            print_usage
+            exit 1
+            ;;
+    esac
+done
+shift $((OPTIND-1))
+
+# Dispatch on the selected hook; without -g or -u the script is a no-op.
+if [ "$HOOK" == "PRE" ]; then
+    get_patch "$1" "$2" "$3"
+elif [ "$HOOK" == "POST" ]; then
+    upload_patch "$1" "$2" "$3"
+fi