From 5f8de423f190bbb79a62f804151bc24824fa32d8 Mon Sep 17 00:00:00 2001
From: "Matt A. Tobin"
Date: Fri, 2 Feb 2018 04:16:08 -0500
Subject: Add m-esr52 at 52.6.0

---
 testing/docker/desktop-test/tc-vcs-config.yml | 40 +++++++++++++++++++++++++++
 1 file changed, 40 insertions(+)
 create mode 100644 testing/docker/desktop-test/tc-vcs-config.yml

diff --git a/testing/docker/desktop-test/tc-vcs-config.yml b/testing/docker/desktop-test/tc-vcs-config.yml
new file mode 100644
index 000000000..25e13ee40
--- /dev/null
+++ b/testing/docker/desktop-test/tc-vcs-config.yml
@@ -0,0 +1,40 @@
+# Default configuration used by the tc-vcs tools; these can be overridden by
+# passing the config you wish to use on the command line.
+git: git
+hg: hg
+
+repoCache:
+  # Repo URL to clone when running `repo init`.
+  repoUrl: https://gerrit.googlesource.com/git-repo.git
+  # Version of repo to use.
+  repoRevision: master
+  # The root where all downloaded cache files are stored on the local machine.
+  cacheDir: '{{env.HOME}}/.tc-vcs-repo/'
+  # Name/prefix used as part of the base URL.
+  cacheName: sources/{{name}}.tar.gz
+  # Command used to upload the tarball.
+  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
+  # Large HTTP GET requests are often slower through node's built-in HTTP
+  # layer, so we use a curl subprocess to do the fetching.
+  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
+  # Command used to create the clone tarball.
+  compress: tar -czf {{dest}} {{source}}
+  # All cache URLs use tar + gz; this is the command used to extract the
+  # files downloaded by the "get" command.
+  extract: tar -x -z -C {{dest}} -f {{source}}
+
+cloneCache:
+  # The root where all downloaded cache files are stored on the local machine.
+  cacheDir: '{{env.HOME}}/.tc-vcs/'
+  # Command used to upload the tarball.
+  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
+  # Large HTTP GET requests are often slower through node's built-in HTTP
+  # layer, so we use a curl subprocess to do the fetching.
+  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
+  # Command used to create the clone tarball.
+  compress: tar -czf {{dest}} {{source}}
+  # All cache URLs use tar + gz; this is the command used to extract the
+  # files downloaded by the "get" command.
+  extract: tar -x -z --strip-components 1 -C {{dest}} -f {{source}}
+  # Name/prefix used as part of the base URL.
+  cacheName: clones/{{name}}.tar.gz
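
For reference, tc-vcs expands the {{...}} placeholders in these commands
before running them. Below is a minimal sketch of what the cloneCache "get"
and "extract" steps would execute after expansion, assuming an illustrative
cache URL and destination paths (the real values are derived from cacheName,
the repository name, and the task environment, none of which appear in this
patch):

    # Sketch only: the URL and paths below are illustrative stand-ins.
    # Fetch the clone tarball via a curl subprocess (the "get" command, expanded):
    curl --connect-timeout 30 --speed-limit 500000 -L \
      -o "$HOME/.tc-vcs/clones/mozilla-central.tar.gz" \
      'https://cache.example.com/clones/mozilla-central.tar.gz'

    # Unpack into the checkout directory (the "extract" command, expanded).
    # --strip-components 1 drops the tarball's top-level directory so the
    # repository contents land directly in the destination:
    tar -x -z --strip-components 1 -C "$HOME/workspace/gecko" \
      -f "$HOME/.tc-vcs/clones/mozilla-central.tar.gz"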