Diffstat (limited to 'testing/mozharness')
-rw-r--r-- testing/mozharness/LICENSE 373
-rw-r--r-- testing/mozharness/README.txt 32
-rw-r--r-- testing/mozharness/configs/android/androidarm.py 459
-rw-r--r-- testing/mozharness/configs/android/androidarm_4_3-tc.py 10
-rw-r--r-- testing/mozharness/configs/android/androidarm_4_3.py 383
-rw-r--r-- testing/mozharness/configs/android/androidarm_dev.py 9
-rw-r--r-- testing/mozharness/configs/android/androidx86-tc.py 73
-rw-r--r-- testing/mozharness/configs/android/androidx86.py 182
-rw-r--r-- testing/mozharness/configs/balrog/docker-worker.py 18
-rw-r--r-- testing/mozharness/configs/balrog/production.py 28
-rw-r--r-- testing/mozharness/configs/balrog/staging.py 14
-rw-r--r-- testing/mozharness/configs/beetmover/en_us_build.yml.tmpl 191
-rw-r--r-- testing/mozharness/configs/beetmover/en_us_signing.yml.tmpl 66
-rw-r--r-- testing/mozharness/configs/beetmover/l10n_changesets.tmpl 11
-rw-r--r-- testing/mozharness/configs/beetmover/partials.yml.tmpl 16
-rw-r--r-- testing/mozharness/configs/beetmover/repacks.yml.tmpl 65
-rw-r--r-- testing/mozharness/configs/beetmover/snap.yml.tmpl 11
-rw-r--r-- testing/mozharness/configs/beetmover/snap_checksums.yml.tmpl 14
-rw-r--r-- testing/mozharness/configs/beetmover/source.yml.tmpl 14
-rw-r--r-- testing/mozharness/configs/beetmover/source_checksums.yml.tmpl 14
-rw-r--r-- testing/mozharness/configs/builds/branch_specifics.py 469
-rw-r--r-- testing/mozharness/configs/builds/build_pool_specifics.py 44
-rw-r--r-- testing/mozharness/configs/builds/releng_base_android_64_builds.py 111
-rw-r--r-- testing/mozharness/configs/builds/releng_base_linux_32_builds.py 160
-rw-r--r-- testing/mozharness/configs/builds/releng_base_linux_64_builds.py 139
-rw-r--r-- testing/mozharness/configs/builds/releng_base_mac_64_builds.py 79
-rw-r--r-- testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py 83
-rw-r--r-- testing/mozharness/configs/builds/releng_base_windows_32_builds.py 95
-rw-r--r-- testing/mozharness/configs/builds/releng_base_windows_64_builds.py 93
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15.py 8
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_debug.py 9
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle.py 18
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle_dependencies.py 21
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_partner_sample1.py 9
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_checkstyle.py 11
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_lint.py 11
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_test.py 11
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_android_configs/64_x86.py 8
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/32_artifact.py 116
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug.py 45
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug_artifact.py 122
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py 43
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py 98
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan.py 48
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_and_debug.py 49
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc.py 48
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc_and_debug.py 49
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_code_coverage.py 45
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug.py 45
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug_artifact.py 96
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_source.py 20
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py 50
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py 88
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_tsan.py 46
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_linux_configs/64_valgrind.py 49
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_add-on-devel.py 44
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_artifact.py 65
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_debug.py 43
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_opt.py 39
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_universal.py 4
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug.py 44
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug_artifact.py 65
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_mac_configs/64_stat_and_debug.py 48
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/32_add-on-devel.py 38
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/32_artifact.py 81
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug.py 40
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug_artifact.py 86
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/32_stat_and_debug.py 44
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/64_add-on-devel.py 37
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/64_artifact.py 79
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug.py 39
-rw-r--r-- testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug_artifact.py 85
-rw-r--r-- testing/mozharness/configs/builds/taskcluster_firefox_win32_debug.py 91
-rw-r--r-- testing/mozharness/configs/builds/taskcluster_firefox_win32_opt.py 89
-rw-r--r-- testing/mozharness/configs/builds/taskcluster_firefox_win64_debug.py 87
-rw-r--r-- testing/mozharness/configs/builds/taskcluster_firefox_win64_opt.py 85
-rw-r--r-- testing/mozharness/configs/developer_config.py 49
-rw-r--r-- testing/mozharness/configs/disable_signing.py 3
-rw-r--r-- testing/mozharness/configs/firefox_ui_tests/qa_jenkins.py 19
-rw-r--r-- testing/mozharness/configs/firefox_ui_tests/releng_release.py 33
-rw-r--r-- testing/mozharness/configs/firefox_ui_tests/taskcluster.py 11
-rw-r--r-- testing/mozharness/configs/hazards/build_browser.py 4
-rw-r--r-- testing/mozharness/configs/hazards/build_shell.py 4
-rw-r--r-- testing/mozharness/configs/hazards/common.py 104
-rw-r--r-- testing/mozharness/configs/marionette/prod_config.py 56
-rw-r--r-- testing/mozharness/configs/marionette/test_config.py 29
-rw-r--r-- testing/mozharness/configs/marionette/windows_config.py 57
-rw-r--r-- testing/mozharness/configs/marionette/windows_taskcluster_config.py 56
-rw-r--r-- testing/mozharness/configs/mediatests/buildbot_posix_config.py 50
-rw-r--r-- testing/mozharness/configs/mediatests/buildbot_windows_config.py 56
-rwxr-xr-x testing/mozharness/configs/mediatests/jenkins_config.py 48
-rw-r--r-- testing/mozharness/configs/mediatests/taskcluster_posix_config.py 47
-rw-r--r-- testing/mozharness/configs/mediatests/taskcluster_windows_config.py 50
-rw-r--r-- testing/mozharness/configs/merge_day/aurora_to_beta.py 83
-rw-r--r-- testing/mozharness/configs/merge_day/beta_to_release.py 53
-rw-r--r-- testing/mozharness/configs/merge_day/bump_esr.py 24
-rw-r--r-- testing/mozharness/configs/merge_day/central_to_aurora.py 100
-rw-r--r-- testing/mozharness/configs/merge_day/release_to_esr.py 54
-rw-r--r-- testing/mozharness/configs/merge_day/staging_beta_migration.py 22
-rw-r--r-- testing/mozharness/configs/multi_locale/android-mozharness-build.json 5
-rw-r--r-- testing/mozharness/configs/multi_locale/ash_android-x86.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/ash_android.json 27
-rw-r--r-- testing/mozharness/configs/multi_locale/b2g_linux32.py 2
-rw-r--r-- testing/mozharness/configs/multi_locale/b2g_linux64.py 2
-rw-r--r-- testing/mozharness/configs/multi_locale/b2g_macosx64.py 2
-rw-r--r-- testing/mozharness/configs/multi_locale/b2g_win32.py 8
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-aurora_android-armv6.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-aurora_android-x86.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-aurora_android.json 27
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-beta_android-armv6.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-beta_android-x86.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-beta_android.json 27
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-central_android-armv6.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-central_android-x86.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-central_android.json 27
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-release_android-armv6.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-release_android-x86.json 28
-rw-r--r-- testing/mozharness/configs/multi_locale/mozilla-release_android.json 27
-rw-r--r-- testing/mozharness/configs/multi_locale/release_mozilla-beta_android-armv6.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/release_mozilla-beta_android-x86.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/release_mozilla-beta_android.json 33
-rw-r--r-- testing/mozharness/configs/multi_locale/release_mozilla-release_android-armv6.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/release_mozilla-release_android-x86.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/release_mozilla-release_android.json 33
-rw-r--r-- testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-armv6.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-x86.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android.json 33
-rw-r--r-- testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-armv6.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-x86.json 34
-rw-r--r-- testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android.json 33
-rw-r--r-- testing/mozharness/configs/multi_locale/standalone_mozilla-central.py 49
-rw-r--r-- testing/mozharness/configs/partner_repacks/release_mozilla-esr52_desktop.py 6
-rw-r--r-- testing/mozharness/configs/partner_repacks/release_mozilla-release_android.py 47
-rw-r--r-- testing/mozharness/configs/partner_repacks/release_mozilla-release_desktop.py 6
-rw-r--r-- testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_android.py 52
-rw-r--r-- testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_desktop.py 6
-rw-r--r-- testing/mozharness/configs/platform_supports_post_upload_to_latest.py 3
-rw-r--r-- testing/mozharness/configs/releases/bouncer_fennec.py 22
-rw-r--r-- testing/mozharness/configs/releases/bouncer_firefox_beta.py 148
-rw-r--r-- testing/mozharness/configs/releases/bouncer_firefox_esr.py 136
-rw-r--r-- testing/mozharness/configs/releases/bouncer_firefox_release.py 191
-rw-r--r-- testing/mozharness/configs/releases/bouncer_thunderbird.py 169
-rw-r--r-- testing/mozharness/configs/releases/dev_bouncer_firefox_beta.py 133
-rw-r--r-- testing/mozharness/configs/releases/dev_postrelease_firefox_beta.py 20
-rw-r--r-- testing/mozharness/configs/releases/dev_postrelease_firefox_release.py 22
-rw-r--r-- testing/mozharness/configs/releases/dev_updates_firefox_beta.py 39
-rw-r--r-- testing/mozharness/configs/releases/dev_updates_firefox_release.py 50
-rw-r--r-- testing/mozharness/configs/releases/postrelease_firefox_beta.py 18
-rw-r--r-- testing/mozharness/configs/releases/postrelease_firefox_esr52.py 22
-rw-r--r-- testing/mozharness/configs/releases/postrelease_firefox_release.py 22
-rw-r--r-- testing/mozharness/configs/releases/updates_firefox_beta.py 35
-rw-r--r-- testing/mozharness/configs/releases/updates_firefox_esr52.py 35
-rw-r--r-- testing/mozharness/configs/releases/updates_firefox_release.py 47
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/builders.py 47
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/linux.py 5
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/linux64.py 5
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/macosx64.py 5
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/testers.py 67
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/win32.py 5
-rw-r--r-- testing/mozharness/configs/releng_infra_configs/win64.py 5
-rw-r--r-- testing/mozharness/configs/remove_executables.py 8
-rw-r--r-- testing/mozharness/configs/routes.json 18
-rw-r--r-- testing/mozharness/configs/selfserve/production.py 3
-rw-r--r-- testing/mozharness/configs/selfserve/staging.py 3
-rw-r--r-- testing/mozharness/configs/servo/mac.py 3
-rw-r--r-- testing/mozharness/configs/single_locale/alder.py 46
-rw-r--r-- testing/mozharness/configs/single_locale/ash.py 46
-rw-r--r-- testing/mozharness/configs/single_locale/ash_android-api-15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/dev-mozilla-beta.py 37
-rw-r--r-- testing/mozharness/configs/single_locale/dev-mozilla-release.py 37
-rw-r--r-- testing/mozharness/configs/single_locale/linux.py 123
l--------- testing/mozharness/configs/single_locale/linux32.py 1
-rw-r--r-- testing/mozharness/configs/single_locale/linux64.py 103
-rw-r--r-- testing/mozharness/configs/single_locale/macosx64.py 72
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-aurora.py 29
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-aurora_android-api-15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-beta.py 37
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-central.py 29
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-central_android-api-15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-esr52.py 37
-rw-r--r-- testing/mozharness/configs/single_locale/mozilla-release.py 37
-rw-r--r-- testing/mozharness/configs/single_locale/production.py 14
-rw-r--r-- testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/release_mozilla-release_android_api_15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/staging.py 17
-rw-r--r-- testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/tc_android-api-15.py 18
-rw-r--r-- testing/mozharness/configs/single_locale/tc_linux32.py 24
-rw-r--r-- testing/mozharness/configs/single_locale/tc_linux64.py 24
-rw-r--r-- testing/mozharness/configs/single_locale/try.py 42
-rw-r--r-- testing/mozharness/configs/single_locale/try_android-api-15.py 97
-rw-r--r-- testing/mozharness/configs/single_locale/win32.py 77
-rw-r--r-- testing/mozharness/configs/single_locale/win64.py 77
-rw-r--r-- testing/mozharness/configs/talos/linux_config.py 46
-rw-r--r-- testing/mozharness/configs/talos/mac_config.py 56
-rw-r--r-- testing/mozharness/configs/talos/windows_config.py 48
-rw-r--r-- testing/mozharness/configs/taskcluster_nightly.py 5
-rw-r--r-- testing/mozharness/configs/test/example_config1.json 5
-rw-r--r-- testing/mozharness/configs/test/example_config2.py 5
-rw-r--r-- testing/mozharness/configs/test/test.illegal_suffix 20
-rw-r--r-- testing/mozharness/configs/test/test.json 20
-rw-r--r-- testing/mozharness/configs/test/test.py 22
-rw-r--r-- testing/mozharness/configs/test/test_malformed.json 20
-rw-r--r-- testing/mozharness/configs/test/test_malformed.py 22
-rw-r--r-- testing/mozharness/configs/test/test_optional.py 4
-rw-r--r-- testing/mozharness/configs/test/test_override.py 7
-rw-r--r-- testing/mozharness/configs/test/test_override2.py 6
-rw-r--r-- testing/mozharness/configs/unittests/linux_unittest.py 306
-rw-r--r-- testing/mozharness/configs/unittests/mac_unittest.py 257
-rw-r--r-- testing/mozharness/configs/unittests/thunderbird_extra.py 17
-rw-r--r-- testing/mozharness/configs/unittests/win_taskcluster_unittest.py 274
-rw-r--r-- testing/mozharness/configs/unittests/win_unittest.py 281
-rw-r--r-- testing/mozharness/configs/users/aki/gaia_json.py 42
-rw-r--r-- testing/mozharness/configs/users/sfink/mock.py 3
-rw-r--r-- testing/mozharness/configs/users/sfink/spidermonkey.py 38
-rw-r--r-- testing/mozharness/configs/web_platform_tests/prod_config.py 47
-rw-r--r-- testing/mozharness/configs/web_platform_tests/prod_config_windows.py 48
-rw-r--r-- testing/mozharness/configs/web_platform_tests/prod_config_windows_taskcluster.py 48
-rw-r--r-- testing/mozharness/configs/web_platform_tests/test_config.py 32
-rw-r--r-- testing/mozharness/configs/web_platform_tests/test_config_windows.py 43
-rw-r--r-- testing/mozharness/docs/Makefile 177
-rw-r--r-- testing/mozharness/docs/android_emulator_build.rst 7
-rw-r--r-- testing/mozharness/docs/android_emulator_unittest.rst 7
-rw-r--r-- testing/mozharness/docs/bouncer_submitter.rst 8
-rw-r--r-- testing/mozharness/docs/bump_gaia_json.rst 7
-rw-r--r-- testing/mozharness/docs/conf.py 268
-rw-r--r-- testing/mozharness/docs/configtest.rst 7
-rw-r--r-- testing/mozharness/docs/desktop_l10n.rst 7
-rw-r--r-- testing/mozharness/docs/desktop_unittest.rst 7
-rw-r--r-- testing/mozharness/docs/fx_desktop_build.rst 7
-rw-r--r-- testing/mozharness/docs/gaia_build_integration.rst 7
-rw-r--r-- testing/mozharness/docs/gaia_integration.rst 7
-rw-r--r-- testing/mozharness/docs/gaia_unit.rst 7
-rw-r--r-- testing/mozharness/docs/index.rst 24
-rw-r--r-- testing/mozharness/docs/marionette.rst 7
-rw-r--r-- testing/mozharness/docs/mobile_l10n.rst 7
-rw-r--r-- testing/mozharness/docs/mobile_partner_repack.rst 7
-rw-r--r-- testing/mozharness/docs/modules.rst 13
-rw-r--r-- testing/mozharness/docs/mozharness.base.rst 101
-rw-r--r-- testing/mozharness/docs/mozharness.base.vcs.rst 46
-rw-r--r-- testing/mozharness/docs/mozharness.mozilla.building.rst 22
-rw-r--r-- testing/mozharness/docs/mozharness.mozilla.l10n.rst 30
-rw-r--r-- testing/mozharness/docs/mozharness.mozilla.rst 111
-rw-r--r-- testing/mozharness/docs/mozharness.mozilla.testing.rst 62
-rw-r--r-- testing/mozharness/docs/mozharness.rst 18
-rw-r--r-- testing/mozharness/docs/multil10n.rst 7
-rw-r--r-- testing/mozharness/docs/scripts.rst 22
-rw-r--r-- testing/mozharness/docs/spidermonkey_build.rst 7
-rw-r--r-- testing/mozharness/docs/talos_script.rst 7
-rw-r--r-- testing/mozharness/docs/web_platform_tests.rst 7
-rwxr-xr-x testing/mozharness/examples/action_config_script.py 130
-rwxr-xr-x testing/mozharness/examples/silent_script.py 22
-rwxr-xr-x testing/mozharness/examples/venv.py 41
-rwxr-xr-x testing/mozharness/examples/verbose_script.py 63
-rw-r--r-- testing/mozharness/external_tools/__init__.py 0
-rwxr-xr-x testing/mozharness/external_tools/clobberer.py 280
-rwxr-xr-x testing/mozharness/external_tools/count_and_reboot.py 62
-rw-r--r-- testing/mozharness/external_tools/detect_repo.py 52
-rwxr-xr-x testing/mozharness/external_tools/download_file.py 69
-rw-r--r-- testing/mozharness/external_tools/extract_and_run_command.py 205
-rwxr-xr-x testing/mozharness/external_tools/git-ssh-wrapper.sh 12
-rwxr-xr-x testing/mozharness/external_tools/gittool.py 94
-rw-r--r-- testing/mozharness/external_tools/machine-configuration.json 12
-rwxr-xr-x testing/mozharness/external_tools/mouse_and_screen_resolution.py 153
-rw-r--r-- testing/mozharness/external_tools/performance-artifact-schema.json 164
-rw-r--r-- testing/mozharness/external_tools/robustcheckout.py 451
-rw-r--r-- testing/mozharness/external_tools/virtualenv/AUTHORS.txt 91
-rw-r--r-- testing/mozharness/external_tools/virtualenv/LICENSE.txt 22
-rw-r--r-- testing/mozharness/external_tools/virtualenv/MANIFEST.in 12
-rw-r--r-- testing/mozharness/external_tools/virtualenv/PKG-INFO 87
-rw-r--r-- testing/mozharness/external_tools/virtualenv/README.rst 31
-rwxr-xr-x testing/mozharness/external_tools/virtualenv/bin/rebuild-script.py 73
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/Makefile 130
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/changes.rst 985
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/conf.py 153
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/development.rst 61
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/index.rst 137
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/installation.rst 58
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/make.bat 170
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/reference.rst 261
-rw-r--r-- testing/mozharness/external_tools/virtualenv/docs/userguide.rst 258
-rw-r--r-- testing/mozharness/external_tools/virtualenv/scripts/virtualenv 3
-rw-r--r-- testing/mozharness/external_tools/virtualenv/setup.cfg 8
-rw-r--r-- testing/mozharness/external_tools/virtualenv/setup.py 123
-rw-r--r-- testing/mozharness/external_tools/virtualenv/site.py 760
-rw-r--r-- testing/mozharness/external_tools/virtualenv/tests/__init__.py 0
-rwxr-xr-x testing/mozharness/external_tools/virtualenv/tests/test_activate.sh 96
-rw-r--r-- testing/mozharness/external_tools/virtualenv/tests/test_activate_output.expected 2
-rw-r--r-- testing/mozharness/external_tools/virtualenv/tests/test_cmdline.py 44
-rw-r--r-- testing/mozharness/external_tools/virtualenv/tests/test_virtualenv.py 139
-rwxr-xr-x testing/mozharness/external_tools/virtualenv/virtualenv.py 2329
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.bat 30
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.csh 36
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.fish 76
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.ps1 150
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.sh 78
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate_this.py 34
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/deactivate.bat 19
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils-init.py 101
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils.cfg 6
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/python-config 78
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_embedded/site.py 758
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_support/__init__.py 0
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl bin 0 -> 23000 bytes
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl bin 0 -> 1198961 bytes
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl bin 0 -> 442860 bytes
-rw-r--r-- testing/mozharness/external_tools/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl bin 0 -> 66878 bytes
-rw-r--r-- testing/mozharness/mach_commands.py 196
-rw-r--r-- testing/mozharness/mozfile/__init__.py 5
-rw-r--r-- testing/mozharness/mozfile/mozfile.py 372
-rw-r--r-- testing/mozharness/mozharness/__init__.py 2
-rw-r--r-- testing/mozharness/mozharness/base/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/base/config.py 569
-rw-r--r-- testing/mozharness/mozharness/base/diskutils.py 156
-rwxr-xr-x testing/mozharness/mozharness/base/errors.py 213
-rwxr-xr-x testing/mozharness/mozharness/base/log.py 694
-rwxr-xr-x testing/mozharness/mozharness/base/parallel.py 36
-rw-r--r-- testing/mozharness/mozharness/base/python.py 743
-rwxr-xr-x testing/mozharness/mozharness/base/script.py 2273
-rwxr-xr-x testing/mozharness/mozharness/base/signing.py 164
-rwxr-xr-x testing/mozharness/mozharness/base/transfer.py 123
-rw-r--r-- testing/mozharness/mozharness/base/vcs/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/base/vcs/gittool.py 95
-rwxr-xr-x testing/mozharness/mozharness/base/vcs/mercurial.py 497
-rw-r--r-- testing/mozharness/mozharness/base/vcs/tcvcs.py 49
-rwxr-xr-x testing/mozharness/mozharness/base/vcs/vcsbase.py 149
-rw-r--r-- testing/mozharness/mozharness/base/vcs/vcssync.py 101
-rw-r--r-- testing/mozharness/mozharness/lib/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/lib/python/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/lib/python/authentication.py 53
-rw-r--r-- testing/mozharness/mozharness/mozilla/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/mozilla/aws.py 11
-rw-r--r-- testing/mozharness/mozharness/mozilla/blob_upload.py 109
-rw-r--r-- testing/mozharness/mozharness/mozilla/bouncer/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/mozilla/bouncer/submitter.py 114
-rwxr-xr-x testing/mozharness/mozharness/mozilla/buildbot.py 246
-rw-r--r-- testing/mozharness/mozharness/mozilla/building/__init__.py 0
-rwxr-xr-x testing/mozharness/mozharness/mozilla/building/buildbase.py 2155
-rw-r--r-- testing/mozharness/mozharness/mozilla/building/hazards.py 241
-rw-r--r-- testing/mozharness/mozharness/mozilla/checksums.py 21
-rw-r--r-- testing/mozharness/mozharness/mozilla/l10n/__init__.py 0
-rwxr-xr-x testing/mozharness/mozharness/mozilla/l10n/locales.py 280
-rwxr-xr-x testing/mozharness/mozharness/mozilla/l10n/multi_locale_build.py 254
-rw-r--r-- testing/mozharness/mozharness/mozilla/mapper.py 81
-rw-r--r-- testing/mozharness/mozharness/mozilla/mar.py 112
-rw-r--r-- testing/mozharness/mozharness/mozilla/mock.py 251
-rw-r--r-- testing/mozharness/mozharness/mozilla/mozbase.py 39
-rw-r--r-- testing/mozharness/mozharness/mozilla/proxxy.py 167
-rw-r--r-- testing/mozharness/mozharness/mozilla/purge.py 103
-rwxr-xr-x testing/mozharness/mozharness/mozilla/release.py 72
-rw-r--r-- testing/mozharness/mozharness/mozilla/repo_manifest.py 226
-rw-r--r-- testing/mozharness/mozharness/mozilla/repo_manupulation.py 164
-rw-r--r-- testing/mozharness/mozharness/mozilla/secrets.py 74
-rw-r--r-- testing/mozharness/mozharness/mozilla/selfserve.py 47
-rwxr-xr-x testing/mozharness/mozharness/mozilla/signing.py 101
-rw-r--r-- testing/mozharness/mozharness/mozilla/structuredlog.py 173
-rw-r--r-- testing/mozharness/mozharness/mozilla/taskcluster_helper.py 274
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/codecoverage.py 78
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/device.py 738
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/errors.py 119
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/firefox_media_tests.py 289
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/firefox_ui_tests.py 300
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/mozpool.py 134
-rwxr-xr-x testing/mozharness/mozharness/mozilla/testing/talos.py 430
-rwxr-xr-x testing/mozharness/mozharness/mozilla/testing/testbase.py 863
-rw-r--r-- testing/mozharness/mozharness/mozilla/testing/try_tools.py 258
-rwxr-xr-x testing/mozharness/mozharness/mozilla/testing/unittest.py 262
-rw-r--r-- testing/mozharness/mozharness/mozilla/tooltool.py 129
-rw-r--r-- testing/mozharness/mozharness/mozilla/updates/__init__.py 0
-rw-r--r-- testing/mozharness/mozharness/mozilla/updates/balrog.py 149
-rw-r--r-- testing/mozharness/mozharness/mozilla/vcstools.py 57
-rw-r--r-- testing/mozharness/mozinfo/__init__.py 56
-rwxr-xr-x testing/mozharness/mozinfo/mozinfo.py 209
-rw-r--r-- testing/mozharness/mozprocess/__init__.py 5
-rwxr-xr-x testing/mozharness/mozprocess/pid.py 88
-rw-r--r-- testing/mozharness/mozprocess/processhandler.py 921
-rw-r--r-- testing/mozharness/mozprocess/qijo.py 140
-rw-r--r-- testing/mozharness/mozprocess/winprocess.py 457
-rw-r--r-- testing/mozharness/mozprocess/wpk.py 54
-rw-r--r-- testing/mozharness/requirements.txt 25
-rw-r--r-- testing/mozharness/scripts/android_emulator_unittest.py 755
-rwxr-xr-x testing/mozharness/scripts/bouncer_submitter.py 192
-rwxr-xr-x testing/mozharness/scripts/configtest.py 142
-rwxr-xr-x testing/mozharness/scripts/desktop_l10n.py 1152
-rwxr-xr-x testing/mozharness/scripts/desktop_partner_repacks.py 198
-rwxr-xr-x testing/mozharness/scripts/desktop_unittest.py 742
-rw-r--r-- testing/mozharness/scripts/firefox_media_tests_buildbot.py 122
-rwxr-xr-x testing/mozharness/scripts/firefox_media_tests_jenkins.py 48
-rw-r--r-- testing/mozharness/scripts/firefox_media_tests_taskcluster.py 110
-rwxr-xr-x testing/mozharness/scripts/firefox_ui_tests/functional.py 20
-rwxr-xr-x testing/mozharness/scripts/firefox_ui_tests/update.py 20
-rwxr-xr-x testing/mozharness/scripts/firefox_ui_tests/update_release.py 323
-rwxr-xr-x testing/mozharness/scripts/fx_desktop_build.py 235
-rwxr-xr-x testing/mozharness/scripts/gaia_build_integration.py 56
-rwxr-xr-x testing/mozharness/scripts/gaia_build_unit.py 56
-rw-r--r-- testing/mozharness/scripts/gaia_integration.py 75
-rwxr-xr-x testing/mozharness/scripts/gaia_linter.py 148
-rwxr-xr-x testing/mozharness/scripts/gaia_unit.py 109
-rwxr-xr-x testing/mozharness/scripts/marionette.py 358
-rw-r--r-- testing/mozharness/scripts/marionette_harness_tests.py 141
-rwxr-xr-x testing/mozharness/scripts/merge_day/gecko_migration.py 545
-rwxr-xr-x testing/mozharness/scripts/mobile_l10n.py 714
-rwxr-xr-x testing/mozharness/scripts/mobile_partner_repack.py 327
-rwxr-xr-x testing/mozharness/scripts/multil10n.py 21
-rw-r--r-- testing/mozharness/scripts/openh264_build.py 250
-rw-r--r-- testing/mozharness/scripts/release/antivirus.py 193
-rwxr-xr-x testing/mozharness/scripts/release/beet_mover.py 372
-rw-r--r-- testing/mozharness/scripts/release/generate-checksums.py 284
-rw-r--r-- testing/mozharness/scripts/release/postrelease_bouncer_aliases.py 107
-rw-r--r-- testing/mozharness/scripts/release/postrelease_mark_as_shipped.py 110
-rw-r--r-- testing/mozharness/scripts/release/postrelease_version_bump.py 184
-rw-r--r-- testing/mozharness/scripts/release/publish_balrog.py 119
-rw-r--r-- testing/mozharness/scripts/release/push-candidate-to-releases.py 200
-rw-r--r-- testing/mozharness/scripts/release/updates.py 299
-rw-r--r-- testing/mozharness/scripts/release/uptake_monitoring.py 188
-rwxr-xr-x testing/mozharness/scripts/spidermonkey/build.b2g 8
-rwxr-xr-x testing/mozharness/scripts/spidermonkey/build.browser 10
-rwxr-xr-x testing/mozharness/scripts/spidermonkey/build.shell 6
-rwxr-xr-x testing/mozharness/scripts/spidermonkey_build.py 482
-rwxr-xr-x testing/mozharness/scripts/talos_script.py 21
-rwxr-xr-x testing/mozharness/scripts/web_platform_tests.py 258
-rw-r--r-- testing/mozharness/setup.cfg 2
-rw-r--r-- testing/mozharness/setup.py 35
-rw-r--r-- testing/mozharness/test/README 2
-rw-r--r-- testing/mozharness/test/helper_files/.noserc 2
-rw-r--r-- testing/mozharness/test/helper_files/archives/archive.tar bin 0 -> 10240 bytes
-rw-r--r-- testing/mozharness/test/helper_files/archives/archive.tar.bz2 bin 0 -> 256 bytes
-rw-r--r-- testing/mozharness/test/helper_files/archives/archive.tar.gz bin 0 -> 260 bytes
-rw-r--r-- testing/mozharness/test/helper_files/archives/archive.zip bin 0 -> 517 bytes
-rw-r--r-- testing/mozharness/test/helper_files/archives/archive_invalid_filename.zip bin 0 -> 166 bytes
-rwxr-xr-x testing/mozharness/test/helper_files/archives/reference/bin/script.sh 3
-rw-r--r-- testing/mozharness/test/helper_files/archives/reference/lorem.txt 1
-rwxr-xr-x testing/mozharness/test/helper_files/create_archives.sh 11
-rwxr-xr-x testing/mozharness/test/helper_files/init_hgrepo.sh 24
-rw-r--r-- testing/mozharness/test/helper_files/locales.json 18
-rw-r--r-- testing/mozharness/test/helper_files/locales.txt 4
-rw-r--r-- testing/mozharness/test/hgrc 9
-rw-r--r-- testing/mozharness/test/pip-freeze.example.txt 19
-rw-r--r-- testing/mozharness/test/test_base_config.py 308
-rw-r--r-- testing/mozharness/test/test_base_diskutils.py 84
-rw-r--r-- testing/mozharness/test/test_base_log.py 42
-rw-r--r-- testing/mozharness/test/test_base_parallel.py 26
-rw-r--r-- testing/mozharness/test/test_base_python.py 37
-rw-r--r-- testing/mozharness/test/test_base_script.py 898
-rw-r--r-- testing/mozharness/test/test_base_transfer.py 127
-rw-r--r-- testing/mozharness/test/test_base_vcs_mercurial.py 440
-rw-r--r-- testing/mozharness/test/test_l10n_locales.py 132
-rw-r--r-- testing/mozharness/test/test_mozilla_blob_upload.py 103
-rw-r--r-- testing/mozharness/test/test_mozilla_buildbot.py 62
-rw-r--r-- testing/mozharness/test/test_mozilla_release.py 42
-rw-r--r-- testing/mozharness/tox.ini 27
-rwxr-xr-x testing/mozharness/unit.sh 85
454 files changed, 52390 insertions, 0 deletions
diff --git a/testing/mozharness/LICENSE b/testing/mozharness/LICENSE
new file mode 100644
index 000000000..a612ad981
--- /dev/null
+++ b/testing/mozharness/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/testing/mozharness/README.txt b/testing/mozharness/README.txt
new file mode 100644
index 000000000..d2a2ce60a
--- /dev/null
+++ b/testing/mozharness/README.txt
@@ -0,0 +1,32 @@
+# Mozharness
+
+## Docs
+* https://developer.mozilla.org/en-US/docs/Mozharness_FAQ
+* https://wiki.mozilla.org/ReleaseEngineering/Mozharness
+* http://moz-releng-mozharness.readthedocs.org/en/latest/mozharness.mozilla.html
+* http://moz-releng-docs.readthedocs.org/en/latest/software.html#mozharness
+
+## Submitting changes
+Like any Gecko change, please create a patch or submit it to MozReview, and
+open a Bugzilla ticket under the Mozharness component:
+https://bugzilla.mozilla.org/enter_bug.cgi?product=Release%20Engineering&component=Mozharness
+
+The bug will then be triaged by Release Engineering.
+
+## Run unit tests
+To run the mozharness unit tests, the `tox` package needs to be installed:
+
+```
+pip install tox
+```
+
+There are various ways to run the unit tests. Just make sure you are in the `$gecko_repo/testing/mozharness` directory before running one of the commands below:
+
+```
+tox # run all unit tests
+tox -- -x # run all unit tests but stop after first failure
+tox -- test/test_base_log.py # only run the base log unit test
+```
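+
+`tox` can also restrict the run to a single environment with `-e` (use `tox -l`
+to list the environments actually defined in `tox.ini`). For example, assuming
+a `py27` environment is defined:
+
+```
+tox -e py27 -- test/test_base_log.py  # run one test file under Python 2.7 only
+```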
+
+Happy contributing! =)
+
diff --git a/testing/mozharness/configs/android/androidarm.py b/testing/mozharness/configs/android/androidarm.py
new file mode 100644
index 000000000..fc4f742dc
--- /dev/null
+++ b/testing/mozharness/configs/android/androidarm.py
@@ -0,0 +1,459 @@
+import os
+
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "host_utils_url": "http://talos-remote.pvt.build.mozilla.org/tegra/tegra-host-utils.Linux.1109310.2.zip",
+ "robocop_package_name": "org.mozilla.roboexample.test",
+ "device_ip": "127.0.0.1",
+ "default_sut_port1": "20701",
+ "default_sut_port2": "20700", # does not prompt for commands
+ "tooltool_manifest_path": "testing/config/tooltool-manifests/androidarm/releng.manifest",
+ "tooltool_cache": "/builds/tooltool_cache",
+ "emulator_manifest": """
+ [
+ {
+ "size": 193383673,
+ "digest": "6609e8b95db59c6a3ad60fc3dcfc358b2c8ec8b4dda4c2780eb439e1c5dcc5d550f2e47ce56ba14309363070078d09b5287e372f6e95686110ff8a2ef1838221",
+ "algorithm": "sha512",
+ "filename": "android-sdk18_0.r18moz1.orig.tar.gz",
+ "unpack": "True"
+ }
+ ] """,
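+    # The manifest above follows the tooltool format (size, digest, algorithm,
+    # filename); the emulator tarball is presumably fetched with tooltool.py
+    # (listed under "exes" below) and unpacked into the work directory.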
+ "emulator_process_name": "emulator64-arm",
+ "emulator_extra_args": "-debug init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket -qemu -m 1024 -cpu cortex-a9",
+ "device_manager": "sut",
+ "exes": {
+ 'adb': '%(abs_work_dir)s/android-sdk18/platform-tools/adb',
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+ "env": {
+ "DISPLAY": ":0.0",
+ "PATH": "%(PATH)s:%(abs_work_dir)s/android-sdk18/tools:%(abs_work_dir)s/android-sdk18/platform-tools",
+ "MINIDUMP_SAVEPATH": "%(abs_work_dir)s/../minidumps"
+ },
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'setup-avds',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'install',
+ 'run-tests',
+ ],
+ "emulator": {
+ "name": "test-1",
+ "device_id": "emulator-5554",
+ "http_port": "8854", # starting http port to use for the mochitest server
+ "ssl_port": "4454", # starting ssl port to use for the server
+ "emulator_port": 5554,
+ "sut_port1": 20701,
+ "sut_port2": 20700
+ },
+ "suite_definitions": {
+ "mochitest": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=sut",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--extra-profile-file=fonts",
+ "--extra-profile-file=hyphenation",
+ "--screenshot-on-fail",
+ ],
+ },
+ "mochitest-gl": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=sut",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--total-chunks=4",
+ "--subsuite=webgl",
+ ],
+ },
+ "mochitest-media": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=sut",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--total-chunks=2",
+ "--subsuite=media",
+ ],
+ },
+ "robocop": {
+ "run_filename": "runrobocop.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=sut",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--total-chunks=4",
+ "--robocop-apk=../../robocop.apk",
+ "--robocop-ini=robocop.ini",
+ ],
+ },
+ "reftest": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path",
+ "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=16",
+ "--extra-profile-file=fonts",
+ "--extra-profile-file=hyphenation",
+ "--suite=reftest",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ ],
+ "tests": ["tests/layout/reftests/reftest.list"],
+ },
+ "crashtest": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path",
+ "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=2",
+ "--suite=crashtest",
+ ],
+ "tests": ["tests/testing/crashtest/crashtests.list"],
+ },
+ "jsreftest": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path",
+ "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=6",
+ "--extra-profile-file=jsreftest/tests/user.js",
+ "--suite=jstestbrowser",
+ ],
+ "tests": ["../jsreftest/tests/jstests.list"],
+ },
+ "xpcshell": {
+ "run_filename": "remotexpcshelltests.py",
+ "testsdir": "xpcshell",
+ "options": [
+ "--dm_trans=sut",
+ "--deviceIP=%(device_ip)s",
+ "--devicePort=%(device_port)s",
+ "--xre-path=%(xre_path)s",
+ "--testing-modules-dir=%(modules_dir)s",
+ "--apk=%(installer_path)s",
+ "--no-logfiles",
+ "--symbols-path=%(symbols_path)s",
+ "--manifest=tests/xpcshell.ini",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--total-chunks=3",
+ ],
+ },
+ }, # end suite_definitions
+ "test_suite_definitions": {
+ "jsreftest-1": {
+ "category": "jsreftest",
+ "extra_args": ["--this-chunk=1"],
+ },
+ "jsreftest-2": {
+ "category": "jsreftest",
+ "extra_args": ["--this-chunk=2"],
+ },
+ "jsreftest-3": {
+ "category": "jsreftest",
+ "extra_args": ["--this-chunk=3"],
+ },
+ "jsreftest-4": {
+ "category": "jsreftest",
+ "extra_args": ["--this-chunk=4"],
+ },
+ "jsreftest-5": {
+ "category": "jsreftest",
+ "extra_args": ["--this-chunk=5"],
+ },
+ "jsreftest-6": {
+ "category": "jsreftest",
+ "extra_args": ["--this-chunk=6"],
+ },
+ "mochitest-1": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=1"],
+ },
+ "mochitest-2": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=2"],
+ },
+ "mochitest-3": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=3"],
+ },
+ "mochitest-4": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=4"],
+ },
+ "mochitest-5": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=5"],
+ },
+ "mochitest-6": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=6"],
+ },
+ "mochitest-7": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=7"],
+ },
+ "mochitest-8": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=8"],
+ },
+ "mochitest-9": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=9"],
+ },
+ "mochitest-10": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=10"],
+ },
+ "mochitest-11": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=11"],
+ },
+ "mochitest-12": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=12"],
+ },
+ "mochitest-13": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=13"],
+ },
+ "mochitest-14": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=14"],
+ },
+ "mochitest-15": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=15"],
+ },
+ "mochitest-16": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=16"],
+ },
+ "mochitest-chrome": {
+ "category": "mochitest",
+ "extra_args": ["--flavor=chrome"],
+ },
+ "mochitest-media-1": {
+ "category": "mochitest-media",
+ "extra_args": ["--this-chunk=1"],
+ },
+ "mochitest-media-2": {
+ "category": "mochitest-media",
+ "extra_args": ["--this-chunk=2"],
+ },
+ "mochitest-gl-1": {
+ "category": "mochitest-gl",
+ "extra_args": ["--this-chunk=1"],
+ },
+ "mochitest-gl-2": {
+ "category": "mochitest-gl",
+ "extra_args": ["--this-chunk=2"],
+ },
+ "mochitest-gl-3": {
+ "category": "mochitest-gl",
+ "extra_args": ["--this-chunk=3"],
+ },
+ "mochitest-gl-4": {
+ "category": "mochitest-gl",
+ "extra_args": ["--this-chunk=4"],
+ },
+ "reftest-1": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=1"],
+ },
+ "reftest-2": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=2"],
+ },
+ "reftest-3": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=3"],
+ },
+ "reftest-4": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=4"],
+ },
+ "reftest-5": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=5"],
+ },
+ "reftest-6": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=6"],
+ },
+ "reftest-7": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=7"],
+ },
+ "reftest-8": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=8"],
+ },
+ "reftest-9": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=9"],
+ },
+ "reftest-10": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=10"],
+ },
+ "reftest-11": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=11"],
+ },
+ "reftest-12": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=12"],
+ },
+ "reftest-13": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=13"],
+ },
+ "reftest-14": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=14"],
+ },
+ "reftest-15": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=15"],
+ },
+ "reftest-16": {
+ "category": "reftest",
+ "extra_args": ["--total-chunks=16", "--this-chunk=16"],
+ },
+ "crashtest-1": {
+ "category": "crashtest",
+ "extra_args": ["--this-chunk=1"],
+ },
+ "crashtest-2": {
+ "category": "crashtest",
+ "extra_args": ["--this-chunk=2"],
+ },
+ "xpcshell-1": {
+ "category": "xpcshell",
+ "extra_args": ["--total-chunks=3", "--this-chunk=1"],
+ },
+ "xpcshell-2": {
+ "category": "xpcshell",
+ "extra_args": ["--total-chunks=3", "--this-chunk=2"],
+ },
+ "xpcshell-3": {
+ "category": "xpcshell",
+ "extra_args": ["--total-chunks=3", "--this-chunk=3"],
+ },
+ "robocop-1": {
+ "category": "robocop",
+ "extra_args": ["--this-chunk=1"],
+ },
+ "robocop-2": {
+ "category": "robocop",
+ "extra_args": ["--this-chunk=2"],
+ },
+ "robocop-3": {
+ "category": "robocop",
+ "extra_args": ["--this-chunk=3"],
+ },
+ "robocop-4": {
+ "category": "robocop",
+ "extra_args": ["--this-chunk=4"],
+ },
+ }, # end of "test_definitions"
+ "download_minidump_stackwalk": True,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+}
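The option strings throughout these suite definitions use Python %-style named placeholders (e.g. `%(app)s`, `%(symbols_path)s`) that the harness interpolates against runtime values before launching a suite. A minimal sketch of that resolution step; the value dict below is illustrative, not the harness's actual key set:

```python
# Minimal sketch of how the "%(key)s" placeholders in the option lists
# resolve: each option string is %-interpolated against a dict of runtime
# values. The values here are illustrative.
options = [
    "--app=%(app)s",
    "--remote-webserver=%(remote_webserver)s",
    "--http-port=%(http_port)s",
]
values = {
    "app": "org.mozilla.fennec",
    "remote_webserver": "10.0.2.2",
    "http_port": "8854",
}
resolved = [opt % values for opt in options]
print(resolved)
# ['--app=org.mozilla.fennec', '--remote-webserver=10.0.2.2',
#  '--http-port=8854']
```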
diff --git a/testing/mozharness/configs/android/androidarm_4_3-tc.py b/testing/mozharness/configs/android/androidarm_4_3-tc.py
new file mode 100644
index 000000000..dd87e6695
--- /dev/null
+++ b/testing/mozharness/configs/android/androidarm_4_3-tc.py
@@ -0,0 +1,10 @@
+config = {
+ # Additional Android 4.3 settings required when running in taskcluster.
+ "avds_dir": "/home/worker/workspace/build/.android",
+ "tooltool_cache": "/home/worker/tooltool_cache",
+ "download_tooltool": True,
+ "tooltool_servers": ['http://relengapi/tooltool/'],
+ "exes": {
+ 'adb': '%(abs_work_dir)s/android-sdk18/platform-tools/adb',
+ }
+}
diff --git a/testing/mozharness/configs/android/androidarm_4_3.py b/testing/mozharness/configs/android/androidarm_4_3.py
new file mode 100644
index 000000000..bae25fecc
--- /dev/null
+++ b/testing/mozharness/configs/android/androidarm_4_3.py
@@ -0,0 +1,383 @@
+import os
+
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
+ "robocop_package_name": "org.mozilla.roboexample.test",
+ "marionette_address": "localhost:2828",
+ "marionette_test_manifest": "unit-tests.ini",
+ "tooltool_manifest_path": "testing/config/tooltool-manifests/androidarm_4_3/releng.manifest",
+ "tooltool_cache": "/builds/tooltool_cache",
+ "avds_dir": "/home/cltbld/.android",
+ "emulator_manifest": """
+ [
+ {
+ "size": 140097024,
+ "digest": "51781032335c09103e8509b1a558bf22a7119392cf1ea301c49c01bdf21ff0ceb37d260bc1c322cd9b903252429fb01830fc27e4632be30cd345c95bf4b1a39b",
+ "algorithm": "sha512",
+ "filename": "android-sdk_r24.0.2-linux.tgz",
+ "unpack": "True"
+ }
+ ] """,
+ "tools_manifest": """
+ [
+ {
+ "size": 193383673,
+ "digest": "6609e8b95db59c6a3ad60fc3dcfc358b2c8ec8b4dda4c2780eb439e1c5dcc5d550f2e47ce56ba14309363070078d09b5287e372f6e95686110ff8a2ef1838221",
+ "algorithm": "sha512",
+ "filename": "android-sdk18_0.r18moz1.orig.tar.gz",
+ "unpack": "True"
+ }
+ ] """,
+ "emulator_process_name": "emulator64-arm",
+ "emulator_extra_args": "-show-kernel -debug init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket",
+ "device_manager": "adb",
+ "exes": {
+ 'adb': '%(abs_work_dir)s/android-sdk18/platform-tools/adb',
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+ "env": {
+ "DISPLAY": ":0.0",
+ "PATH": "%(PATH)s:%(abs_work_dir)s/android-sdk-linux/tools:%(abs_work_dir)s/android-sdk18/platform-tools",
+ "MINIDUMP_SAVEPATH": "%(abs_work_dir)s/../minidumps"
+ },
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'setup-avds',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'install',
+ 'run-tests',
+ ],
+ "emulator": {
+ "name": "test-1",
+ "device_id": "emulator-5554",
+ "http_port": "8854", # starting http port to use for the mochitest server
+ "ssl_port": "4454", # starting ssl port to use for the server
+ "emulator_port": 5554,
+ },
+ "suite_definitions": {
+ "mochitest": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--extra-profile-file=fonts",
+ "--extra-profile-file=hyphenation",
+ "--screenshot-on-fail",
+ "--total-chunks=20",
+ ],
+ },
+ "mochitest-gl": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--total-chunks=10",
+ "--subsuite=webgl",
+ ],
+ },
+ "mochitest-chrome": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--extra-profile-file=fonts",
+ "--extra-profile-file=hyphenation",
+ "--screenshot-on-fail",
+ "--flavor=chrome",
+ ],
+ },
+ "mochitest-plain-gpu": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--subsuite=gpu",
+ ],
+ },
+ "mochitest-plain-clipboard": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--subsuite=clipboard",
+ ],
+ },
+ "mochitest-media": {
+ "run_filename": "runtestsremote.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--chunk-by-runtime",
+ "--total-chunks=2",
+ "--subsuite=media",
+ ],
+ },
+ "robocop": {
+ "run_filename": "runrobocop.py",
+ "testsdir": "mochitest",
+ "options": [
+ "--dm_trans=adb",
+ "--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--total-chunks=4",
+ "--robocop-apk=../../robocop.apk",
+ "--robocop-ini=robocop.ini",
+ ],
+ },
+ "reftest": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--dm_trans=adb",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path", "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=16",
+ "--extra-profile-file=fonts",
+ "--extra-profile-file=hyphenation",
+ "--suite=reftest",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ ],
+ "tests": ["tests/layout/reftests/reftest.list",],
+ },
+ "reftest-debug": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--dm_trans=adb",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path", "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=48",
+ "--extra-profile-file=fonts",
+ "--extra-profile-file=hyphenation",
+ "tests/layout/reftests/reftest.list",
+ ],
+ },
+ "crashtest": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--dm_trans=adb",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path",
+ "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=4",
+ "--suite=crashtest",
+ ],
+ "tests": ["tests/testing/crashtest/crashtests.list",],
+ },
+ "crashtest-debug": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--dm_trans=adb",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path",
+ "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=10",
+ "tests/testing/crashtest/crashtests.list",
+ ],
+ },
+ "jsreftest": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--dm_trans=adb",
+ "--remote-webserver=%(remote_webserver)s", "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s", "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s", "--httpd-path", "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--total-chunks=6",
+ "--extra-profile-file=jsreftest/tests/user.js",
+ "--suite=jstestbrowser",
+ ],
+ "tests": ["../jsreftest/tests/jstests.list",],
+ },
+ "jsreftest-debug": {
+ "run_filename": "remotereftest.py",
+ "testsdir": "reftest",
+ "options": [
+ "--app=%(app)s",
+ "--ignore-window-size",
+ "--dm_trans=adb",
+ "--remote-webserver=%(remote_webserver)s", "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s", "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s", "--httpd-path", "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "../jsreftest/tests/jstests.list",
+ "--total-chunks=20",
+ "--extra-profile-file=jsreftest/tests/user.js",
+ ],
+ },
+ "xpcshell": {
+ "run_filename": "remotexpcshelltests.py",
+ "testsdir": "xpcshell",
+ "install": False,
+ "options": [
+ "--dm_trans=adb",
+ "--xre-path=%(xre_path)s",
+ "--testing-modules-dir=%(modules_dir)s",
+ "--apk=%(installer_path)s",
+ "--no-logfiles",
+ "--symbols-path=%(symbols_path)s",
+ "--manifest=tests/xpcshell.ini",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--test-plugin-path=none",
+ "--total-chunks=3",
+ ],
+ },
+ "cppunittest": {
+ "run_filename": "remotecppunittests.py",
+ "testsdir": "cppunittest",
+ "install": False,
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--xre-path=%(xre_path)s",
+ "--dm_trans=adb",
+ "--localBinDir=../bin",
+ "--apk=%(installer_path)s",
+ ".",
+ ],
+ },
+ "marionette": {
+ "run_filename": os.path.join("harness", "marionette_harness", "runtests.py"),
+ "testsdir": "marionette",
+ "options": [
+ "--emulator",
+ "--app=fennec",
+ "--package=%(app)s",
+ "--address=%(address)s",
+ "%(test_manifest)s",
+ "--disable-e10s",
+ "--gecko-log=%(gecko_log)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--symbols-path=%(symbols_path)s",
+ "--startup-timeout=300",
+ ],
+ },
+ }, # end suite_definitions
+ "download_minidump_stackwalk": True,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+}
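The `emulator_manifest` and `tools_manifest` values above embed tooltool manifests as JSON strings inside the Python config. A short sketch of how a consumer could recover the records, using a trimmed copy of the manifest text (digest shortened here for readability):

```python
import json

emulator_manifest = """
    [
        {
            "size": 140097024,
            "digest": "51781032335c09...",
            "algorithm": "sha512",
            "filename": "android-sdk_r24.0.2-linux.tgz",
            "unpack": "True"
        }
    ] """

# json.loads tolerates the leading/trailing whitespace in the triple-quoted
# string; each record describes one artifact to fetch and optionally unpack.
for record in json.loads(emulator_manifest):
    print(record["filename"], record["size"], record["algorithm"])
```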
diff --git a/testing/mozharness/configs/android/androidarm_dev.py b/testing/mozharness/configs/android/androidarm_dev.py
new file mode 100644
index 000000000..e4de6a9f2
--- /dev/null
+++ b/testing/mozharness/configs/android/androidarm_dev.py
@@ -0,0 +1,9 @@
+# This config contains dev values that will replace
+# the values specified in the production config
+# when the configs are given in this order (order matters):
+# --cfg android/androidarm.py
+# --cfg android/androidarm_dev.py
+import os
+config = {
+ "tooltool_cache_path": os.path.join(os.getenv("HOME"), "cache"),
+}
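The header comment describes mozharness's layered-config behavior: each `--cfg` file is loaded in order, and later files override earlier keys. A minimal sketch of that merge, assuming each config file defines a top-level `config` dict as these do; the loader below is illustrative, not mozharness's actual implementation:

```python
# Illustrative loader for the layering described above: each --cfg file
# defines a top-level `config` dict, and later files override earlier keys.
def load_config_files(paths):
    merged = {}
    for path in paths:
        scope = {}
        with open(path) as f:
            exec(f.read(), scope)       # the file defines `config`
        merged.update(scope["config"])  # later files win on key collisions
    return merged

# merged = load_config_files([
#     "configs/android/androidarm.py",      # production values
#     "configs/android/androidarm_dev.py",  # dev values replace them
# ])
```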
diff --git a/testing/mozharness/configs/android/androidx86-tc.py b/testing/mozharness/configs/android/androidx86-tc.py
new file mode 100644
index 000000000..8141b77f6
--- /dev/null
+++ b/testing/mozharness/configs/android/androidx86-tc.py
@@ -0,0 +1,73 @@
+import os
+
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
+ "tooltool_manifest_path": "testing/config/tooltool-manifests/androidx86/releng.manifest",
+ "tooltool_cache": "/home/worker/tooltool_cache",
+ "download_tooltool": True,
+ "tooltool_servers": ['http://relengapi/tooltool/'],
+ "avds_dir": "/home/worker/workspace/build/.android",
+ "emulator_manifest": """
+ [
+ {
+ "size": 193383673,
+ "digest": "6609e8b95db59c6a3ad60fc3dcfc358b2c8ec8b4dda4c2780eb439e1c5dcc5d550f2e47ce56ba14309363070078d09b5287e372f6e95686110ff8a2ef1838221",
+ "algorithm": "sha512",
+ "filename": "android-sdk18_0.r18moz1.orig.tar.gz",
+ "unpack": "True"
+ }
+ ] """,
+ "emulator_process_name": "emulator64-x86",
+ "emulator_extra_args": "-show-kernel -debug init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket -qemu -m 1024",
+ "device_manager": "adb",
+ "exes": {
+ 'adb': '%(abs_work_dir)s/android-sdk18/platform-tools/adb',
+ },
+ "env": {
+ "DISPLAY": ":0.0",
+ "PATH": "%(PATH)s:%(abs_work_dir)s/android-sdk18/tools:%(abs_work_dir)s/android-sdk18/platform-tools",
+ "MINIDUMP_SAVEPATH": "%(abs_work_dir)s/../minidumps"
+ },
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'setup-avds',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'run-tests',
+ ],
+ "emulator": {
+ "name": "test-1",
+ "device_id": "emulator-5554",
+ "http_port": "8854", # starting http port to use for the mochitest server
+ "ssl_port": "4454", # starting ssl port to use for the server
+ "emulator_port": 5554,
+ },
+ "suite_definitions": {
+ "xpcshell": {
+ "run_filename": "remotexpcshelltests.py",
+ "testsdir": "xpcshell",
+ "install": False,
+ "options": [
+ "--dm_trans=adb",
+ "--xre-path=%(xre_path)s",
+ "--testing-modules-dir=%(modules_dir)s",
+ "--apk=%(installer_path)s",
+ "--no-logfiles",
+ "--symbols-path=%(symbols_path)s",
+ "--manifest=tests/xpcshell.ini",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--test-plugin-path=none",
+ ],
+ },
+ }, # end suite_definitions
+ "download_minidump_stackwalk": True,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+}
diff --git a/testing/mozharness/configs/android/androidx86.py b/testing/mozharness/configs/android/androidx86.py
new file mode 100644
index 000000000..e74551d0a
--- /dev/null
+++ b/testing/mozharness/configs/android/androidx86.py
@@ -0,0 +1,182 @@
+import os
+
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
+ "robocop_package_name": "org.mozilla.roboexample.test",
+ "device_ip": "127.0.0.1",
+ "tooltool_manifest_path": "testing/config/tooltool-manifests/androidx86/releng.manifest",
+ "tooltool_cache": "/builds/tooltool_cache",
+ "avds_dir": "/home/cltbld/.android",
+ "emulator_manifest": """
+ [
+ {
+ "size": 193383673,
+ "digest": "6609e8b95db59c6a3ad60fc3dcfc358b2c8ec8b4dda4c2780eb439e1c5dcc5d550f2e47ce56ba14309363070078d09b5287e372f6e95686110ff8a2ef1838221",
+ "algorithm": "sha512",
+ "filename": "android-sdk18_0.r18moz1.orig.tar.gz",
+ "unpack": "True"
+ }
+ ] """,
+ "emulator_process_name": "emulator64-x86",
+ "emulator_extra_args": "-debug init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket -qemu -m 1024 -enable-kvm",
+ "device_manager": "adb",
+ "exes": {
+ 'adb': '%(abs_work_dir)s/android-sdk18/platform-tools/adb',
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+ "env": {
+ "DISPLAY": ":0.0",
+ "PATH": "%(PATH)s:%(abs_work_dir)s/android-sdk18/tools:%(abs_work_dir)s/android-sdk18/platform-tools",
+ },
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'setup-avds',
+ 'start-emulators',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ 'stop-emulators',
+ ],
+ "emulators": [
+ {
+ "name": "test-1",
+ "device_id": "emulator-5554",
+ "http_port": "8854", # starting http port to use for the mochitest server
+ "ssl_port": "4454", # starting ssl port to use for the server
+ "emulator_port": 5554,
+ },
+ {
+ "name": "test-2",
+ "device_id": "emulator-5556",
+ "http_port": "8856", # starting http port to use for the mochitest server
+ "ssl_port": "4456", # starting ssl port to use for the server
+ "emulator_port": 5556,
+ },
+ {
+ "name": "test-3",
+ "device_id": "emulator-5558",
+ "http_port": "8858", # starting http port to use for the mochitest server
+ "ssl_port": "4458", # starting ssl port to use for the server
+ "emulator_port": 5558,
+ },
+ {
+ "name": "test-4",
+ "device_id": "emulator-5560",
+ "http_port": "8860", # starting http port to use for the mochitest server
+ "ssl_port": "4460", # starting ssl port to use for the server
+ "emulator_port": 5560,
+ }
+ ],
+ "suite_definitions": {
+ "mochitest": {
+ "run_filename": "runtestsremote.py",
+ "options": ["--app=%(app)s",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--certificate-path=%(certs_path)s",
+ "--symbols-path=%(symbols_path)s",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ ],
+ },
+ "reftest": {
+ "run_filename": "remotereftest.py",
+ "options": ["--app=%(app)s",
+ "--ignore-window-size",
+ "--remote-webserver=%(remote_webserver)s",
+ "--xre-path=%(xre_path)s",
+ "--utility-path=%(utility_path)s",
+ "--http-port=%(http_port)s",
+ "--ssl-port=%(ssl_port)s",
+ "--httpd-path", "%(modules_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ ],
+ },
+ "xpcshell": {
+ "run_filename": "remotexpcshelltests.py",
+ "options": ["--xre-path=%(xre_path)s",
+ "--testing-modules-dir=%(modules_dir)s",
+ "--apk=%(installer_path)s",
+ "--no-logfiles",
+ "--symbols-path=%(symbols_path)s",
+ "--manifest=tests/xpcshell.ini",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--test-plugin-path=none",
+ ],
+ },
+ }, # end suite_definitions
+ "test_suite_definitions": {
+ "jsreftest": {
+ "category": "reftest",
+ "tests": ["../jsreftest/tests/jstests.list"],
+ "extra_args": [
+ "--suite=jstestbrowser",
+ "--extra-profile-file=jsreftest/tests/user.js"
+ ]
+ },
+ "mochitest-1": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=2", "--this-chunk=1"],
+ },
+ "mochitest-2": {
+ "category": "mochitest",
+ "extra_args": ["--total-chunks=2", "--this-chunk=2"],
+ },
+ "mochitest-gl": {
+ "category": "mochitest",
+ "extra_args": ["--subsuite=webgl"],
+ },
+ "reftest-1": {
+ "category": "reftest",
+ "extra_args": [
+ "--suite=reftest",
+ "--total-chunks=3",
+ "--this-chunk=1",
+ ],
+ "tests": ["tests/layout/reftests/reftest.list"],
+ },
+ "reftest-2": {
+ "extra_args": [
+ "--suite=reftest",
+ "--total-chunks=3",
+ "--this-chunk=2",
+ ],
+ "tests": ["tests/layout/reftests/reftest.list"],
+ },
+ "reftest-3": {
+ "extra_args": [
+ "--suite=reftest",
+ "--total-chunks=3",
+ "--this-chunk=3",
+ ],
+ "tests": ["tests/layout/reftests/reftest.list"],
+ },
+ "crashtest": {
+ "category": "reftest",
+ "extra_args": ["--suite=crashtest"],
+ "tests": ["tests/testing/crashtest/crashtests.list"]
+ },
+ "xpcshell": {
+ "category": "xpcshell",
+ # XXX --manifest is superseded by testing/config/mozharness/android_x86_config.py.
+ # Remove when Gecko 35 is no longer on tbpl.
+ "extra_args": ["--manifest=tests/xpcshell_android.ini"]
+ },
+ }, # end of "test_definitions"
+ "download_minidump_stackwalk": True,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+}
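Each entry in `test_suite_definitions` names a `category` that selects a base suite from `suite_definitions`; the entry's `extra_args` are then appended to that suite's options. A sketch of that resolution with trimmed copies of the dicts above; the command assembly is illustrative:

```python
# Sketch of how a "test_suite_definitions" entry resolves against
# "suite_definitions": its category picks the base suite, and extra_args
# are appended to the suite's options.
suite_definitions = {
    "mochitest": {
        "run_filename": "runtestsremote.py",
        "options": ["--app=%(app)s", "--quiet"],
    },
}
test_suite_definitions = {
    "mochitest-1": {
        "category": "mochitest",
        "extra_args": ["--total-chunks=2", "--this-chunk=1"],
    },
}

def build_command(name):
    entry = test_suite_definitions[name]
    suite = suite_definitions[entry["category"]]
    return ["python", suite["run_filename"]] + suite["options"] + entry["extra_args"]

print(build_command("mochitest-1"))
# ['python', 'runtestsremote.py', '--app=%(app)s', '--quiet',
#  '--total-chunks=2', '--this-chunk=1']
```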
diff --git a/testing/mozharness/configs/balrog/docker-worker.py b/testing/mozharness/configs/balrog/docker-worker.py
new file mode 100644
index 000000000..1ff1c2ac5
--- /dev/null
+++ b/testing/mozharness/configs/balrog/docker-worker.py
@@ -0,0 +1,18 @@
+config = {
+ 'balrog_servers': [
+ {
+ 'balrog_api_root': 'http://balrog/api',
+ 'ignore_failures': False,
+ 'url_replacements': [
+ ('http://archive.mozilla.org/pub', 'http://download.cdn.mozilla.net/pub'),
+ ],
+ 'balrog_usernames': {
+ 'firefox': 'ffxbld',
+ 'thunderbird': 'tbirdbld',
+ 'mobile': 'ffxbld',
+ 'Fennec': 'ffxbld',
+ }
+ }
+ ]
+}
+
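Each `url_replacements` pair rewrites a matching URL prefix before the location is submitted to Balrog. A small sketch of that substitution; the helper is illustrative, not the submitter's actual code:

```python
# Rewrite a URL's prefix per the 'url_replacements' pairs above.
url_replacements = [
    ('http://archive.mozilla.org/pub', 'http://download.cdn.mozilla.net/pub'),
]

def apply_replacements(url, replacements):
    for old, new in replacements:
        if url.startswith(old):
            return new + url[len(old):]
    return url

print(apply_replacements(
    'http://archive.mozilla.org/pub/firefox/releases/52.0/update.mar',
    url_replacements))
# http://download.cdn.mozilla.net/pub/firefox/releases/52.0/update.mar
```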
diff --git a/testing/mozharness/configs/balrog/production.py b/testing/mozharness/configs/balrog/production.py
new file mode 100644
index 000000000..a727f77d1
--- /dev/null
+++ b/testing/mozharness/configs/balrog/production.py
@@ -0,0 +1,28 @@
+config = {
+ 'balrog_servers': [
+ {
+ 'balrog_api_root': 'https://aus4-admin.mozilla.org/api',
+ 'ignore_failures': False,
+ 'url_replacements': [
+ ('http://archive.mozilla.org/pub', 'http://download.cdn.mozilla.net/pub'),
+ ],
+ 'balrog_usernames': {
+ 'firefox': 'ffxbld',
+ 'thunderbird': 'tbirdbld',
+ 'mobile': 'ffxbld',
+ 'Fennec': 'ffxbld',
+ }
+ },
+ # Bug 1261346 - temporarily disable staging balrog submissions
+ # {
+ # 'balrog_api_root': 'https://aus4-admin-dev.allizom.org/api',
+ # 'ignore_failures': True,
+ # 'balrog_usernames': {
+ # 'firefox': 'stage-ffxbld',
+ # 'thunderbird': 'stage-tbirdbld',
+ # 'mobile': 'stage-ffxbld',
+ # 'Fennec': 'stage-ffxbld',
+ # }
+ # }
+ ]
+}
diff --git a/testing/mozharness/configs/balrog/staging.py b/testing/mozharness/configs/balrog/staging.py
new file mode 100644
index 000000000..919974122
--- /dev/null
+++ b/testing/mozharness/configs/balrog/staging.py
@@ -0,0 +1,14 @@
+config = {
+ 'balrog_servers': [
+ {
+ 'balrog_api_root': 'https://aus4-admin-dev.allizom.org/api',
+ 'ignore_failures': False,
+ 'balrog_usernames': {
+ 'firefox': 'stage-ffxbld',
+ 'thunderbird': 'stage-tbirdbld',
+ 'mobile': 'stage-ffxbld',
+ 'Fennec': 'stage-ffxbld',
+ }
+ }
+ ]
+}
diff --git a/testing/mozharness/configs/beetmover/en_us_build.yml.tmpl b/testing/mozharness/configs/beetmover/en_us_build.yml.tmpl
new file mode 100644
index 000000000..33287b042
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/en_us_build.yml.tmpl
@@ -0,0 +1,191 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for the en-US locale"
+ owner: "release@mozilla.com"
+
+mapping:
+{% for locale in locales %}
+ {{ locale }}:
+
+ {% if platform == "win32" %}
+ buildinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.json
+ mozinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.mozinfo.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.mozinfo.json
+ socorroinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.txt
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.txt
+ jsshell:
+ artifact: {{ artifact_base_url }}/jsshell-{{ platform }}.zip
+ s3_key: {{ s3_prefix }}jsshell-{{ platform }}.zip
+ mozharness_package:
+ artifact: {{ artifact_base_url }}/mozharness.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/mozharness.zip
+ xpi:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
+ s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
+ symbols:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.crashreporter-symbols.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.crashreporter-symbols.zip
+ buildid_info:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}_info.txt
+ s3_key: {{ s3_prefix }}win32_info.txt
+ sdk:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.sdk.zip
+ s3_key: {{ s3_prefix }}firefox-{{ version }}.{{ platform }}.sdk.zip
+ mar_tools_mar:
+ artifact: {{ artifact_base_url }}/mar.exe
+ s3_key: {{ s3_prefix }}mar-tools/win32/mar.exe
+ mar_tools_mbdiff:
+ artifact: {{ artifact_base_url }}/mbsdiff.exe
+ s3_key: {{ s3_prefix }}mar-tools/win32/mbsdiff.exe
+ {% endif %}
+
+ {% if platform == "win64" %}
+ buildinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.json
+ mozinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.mozinfo.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.mozinfo.json
+ socorroinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.txt
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.txt
+ jsshell:
+ artifact: {{ artifact_base_url }}/jsshell-{{ platform }}.zip
+ s3_key: {{ s3_prefix }}jsshell-{{ platform }}.zip
+ mozharness_package:
+ artifact: {{ artifact_base_url }}/mozharness.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/mozharness.zip
+ xpi:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
+ s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
+ symbols:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.crashreporter-symbols.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.crashreporter-symbols.zip
+ buildid_info:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}_info.txt
+ s3_key: {{ s3_prefix }}win64_info.txt
+ sdk:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.sdk.zip
+ s3_key: {{ s3_prefix }}firefox-{{ version }}.{{ platform }}.sdk.zip
+ mar_tools_mar:
+ artifact: {{ artifact_base_url }}/mar.exe
+ s3_key: {{ s3_prefix }}mar-tools/win64/mar.exe
+ mar_tools_mbdiff:
+ artifact: {{ artifact_base_url }}/mbsdiff.exe
+ s3_key: {{ s3_prefix }}mar-tools/win64/mbsdiff.exe
+ {% endif %}
+
+ {% if platform == "linux-i686" %}
+ buildinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.json
+ mozinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.mozinfo.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.mozinfo.json
+ socorroinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.txt
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.txt
+ jsshell:
+ artifact: {{ artifact_base_url }}/jsshell-{{ platform }}.zip
+ s3_key: {{ s3_prefix }}jsshell-{{ platform }}.zip
+ mozharness_package:
+ artifact: {{ artifact_base_url }}/mozharness.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/mozharness.zip
+ xpi:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
+ s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
+ symbols:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.crashreporter-symbols.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.crashreporter-symbols.zip
+ buildid_info:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}_info.txt
+ s3_key: {{ s3_prefix }}linux_info.txt
+ sdk:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.sdk.tar.bz2
+ s3_key: {{ s3_prefix }}firefox-{{ version }}.{{ platform }}.sdk.tar.bz2
+ mar_tools_mar:
+ artifact: {{ artifact_base_url }}/mar
+ s3_key: {{ s3_prefix }}mar-tools/linux/mar
+ mar_tools_mbdiff:
+ artifact: {{ artifact_base_url }}/mbsdiff
+ s3_key: {{ s3_prefix }}mar-tools/linux/mbsdiff
+ {% endif %}
+
+ {% if platform == "linux-x86_64" %}
+ buildinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.json
+ mozinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.mozinfo.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.mozinfo.json
+ socorroinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.txt
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.txt
+ jsshell:
+ artifact: {{ artifact_base_url }}/jsshell-{{ platform }}.zip
+ s3_key: {{ s3_prefix }}jsshell-{{ platform }}.zip
+ mozharness_package:
+ artifact: {{ artifact_base_url }}/mozharness.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/mozharness.zip
+ xpi:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
+ s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
+ symbols:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.crashreporter-symbols.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.crashreporter-symbols.zip
+ buildid_info:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}_info.txt
+ s3_key: {{ s3_prefix }}linux64_info.txt
+ sdk:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.sdk.tar.bz2
+ s3_key: {{ s3_prefix }}firefox-{{ version }}.{{ platform }}.sdk.tar.bz2
+ mar_tools_mar:
+ artifact: {{ artifact_base_url }}/mar
+ s3_key: {{ s3_prefix }}mar-tools/linux64/mar
+ mar_tools_mbdiff:
+ artifact: {{ artifact_base_url }}/mbsdiff
+ s3_key: {{ s3_prefix }}mar-tools/linux64/mbsdiff
+ {% endif %}
+
+ {% if platform == "mac" %}
+ buildinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.json
+ mozinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.mozinfo.json
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.mozinfo.json
+ socorroinfo:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.txt
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.txt
+ jsshell:
+ artifact: {{ artifact_base_url }}/jsshell-{{ platform }}.zip
+ s3_key: {{ s3_prefix }}jsshell-{{ platform }}.zip
+ mozharness_package:
+ artifact: {{ artifact_base_url }}/mozharness.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/mozharness.zip
+ xpi:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
+ s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
+ symbols:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.crashreporter-symbols.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.crashreporter-symbols.zip
+ buildid_info:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}_info.txt
+ s3_key: {{ s3_prefix }}macosx64_info.txt
+ sdk:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}-x86_64.sdk.tar.bz2
+ s3_key: {{ s3_prefix }}firefox-{{ version }}.{{ platform }}-x86_64.sdk.tar.bz2
+ mar_tools_mar:
+ artifact: {{ artifact_base_url }}/mar
+ s3_key: {{ s3_prefix }}mar-tools/macosx64/mar
+ mar_tools_mbdiff:
+ artifact: {{ artifact_base_url }}/mbsdiff
+ s3_key: {{ s3_prefix }}mar-tools/macosx64/mbsdiff
+ {% endif %}
+
+{% endfor %}
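These `.yml.tmpl` files are Jinja2 templates that render to YAML beetmover manifests once the locale/platform/version variables are supplied. A minimal rendering sketch with a trimmed template and illustrative values, assuming the jinja2 and PyYAML packages are available:

```python
import jinja2
import yaml

# Trimmed, illustrative version of the en-US build template above.
template_text = """\
mapping:
{% for locale in locales %}
  {{ locale }}:
    buildinfo:
      artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.json
{% endfor %}
"""

rendered = jinja2.Template(template_text).render(
    locales=["en-US"],
    platform="linux-x86_64",
    app_version="52.0",
    artifact_base_url="https://example.com/artifacts",  # illustrative
)
print(yaml.safe_load(rendered))
# {'mapping': {'en-US': {'buildinfo': {'artifact':
#   'https://example.com/artifacts/firefox-52.0.en-US.linux-x86_64.json'}}}}
```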
diff --git a/testing/mozharness/configs/beetmover/en_us_signing.yml.tmpl b/testing/mozharness/configs/beetmover/en_us_signing.yml.tmpl
new file mode 100644
index 000000000..54fc2c792
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/en_us_signing.yml.tmpl
@@ -0,0 +1,66 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for the en-US locale"
+ owner: "release@mozilla.com"
+
+mapping:
+{% for locale in locales %}
+ {{ locale }}:
+ {% if platform == "win32" %}
+ complete_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
+ full_installer:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer.exe
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
+ {% if "esr" not in version %}
+ stub_installer:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer-stub.exe
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup Stub {{ version }}.exe
+ {% endif %}
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
+ {% endif %}
+
+ {% if platform == "win64" %}
+ complete_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
+ full_installer:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer.exe
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
+ {% endif %}
+
+ {% if platform == "linux-i686" %}
+ complete_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.tar.bz2
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
+ {% endif %}
+
+ {% if platform == "linux-x86_64" %}
+ complete_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.tar.bz2
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
+ {% endif %}
+
+ {% if platform == "mac" %}
+ complete_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.dmg
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox {{ version }}.dmg
+ {% endif %}
+
+{% endfor %}
diff --git a/testing/mozharness/configs/beetmover/l10n_changesets.tmpl b/testing/mozharness/configs/beetmover/l10n_changesets.tmpl
new file mode 100644
index 000000000..bde4bc8a7
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/l10n_changesets.tmpl
@@ -0,0 +1,11 @@
+---
+metadata:
+ name: "Beet Mover L10N Changesets"
+ description: "Maps artifact locations to s3 key names for L10N changesets"
+ owner: "release@mozilla.com"
+
+mapping:
+ all:
+ l10n_changesets:
+ artifact: {{ artifact_base_url }}/l10n_changesets.txt
+ s3_key: {{ s3_prefix }}l10n_changesets.txt
diff --git a/testing/mozharness/configs/beetmover/partials.yml.tmpl b/testing/mozharness/configs/beetmover/partials.yml.tmpl
new file mode 100644
index 000000000..a97ac42c0
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/partials.yml.tmpl
@@ -0,0 +1,16 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for partials"
+ owner: "release@mozilla.com"
+
+mapping:
+{% for locale in locales %}
+ {{ locale }}:
+ partial_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ partial_version }}-{{ version }}.{{ locale }}.{{ platform }}.partial.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ partial_version }}-{{ version }}.partial.mar
+ partial_mar_sig:
+ artifact: {{ artifact_base_url }}/firefox-{{ partial_version }}-{{ version }}.{{ locale }}.{{ platform }}.partial.mar.asc
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ partial_version }}-{{ version }}.partial.mar.asc
+{% endfor %}
diff --git a/testing/mozharness/configs/beetmover/repacks.yml.tmpl b/testing/mozharness/configs/beetmover/repacks.yml.tmpl
new file mode 100644
index 000000000..c275ff3e8
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/repacks.yml.tmpl
@@ -0,0 +1,65 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for the non en-US locales"
+ owner: "release@mozilla.com"
+
+mapping:
+{% for locale in locales %}
+ # common deliverables
+ {{ locale }}:
+ complete_mar:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.complete.mar
+ s3_key: {{ s3_prefix }}update/{{ platform }}/{{ locale }}/firefox-{{ version }}.complete.mar
+ checksum:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.checksums
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.checksums
+ checksum_sig:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.checksums.asc
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.checksums.asc
+ xpi:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.langpack.xpi
+ s3_key: {{ s3_prefix }}{{ platform }}/xpi/{{ locale }}.xpi
+
+ {% if platform == "win32" %}
+ full_installer:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer.exe
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
+ {% if "esr" not in version %}
+ stub_installer:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer-stub.exe
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup Stub {{ version }}.exe
+ {% endif %}
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
+ {% endif %}
+
+ {% if platform == "win64" %}
+ full_installer:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.installer.exe
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox Setup {{ version }}.exe
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.zip
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.zip
+ {% endif %}
+
+ {% if platform == "linux-i686" %}
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.tar.bz2
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
+ {% endif %}
+
+ {% if platform == "linux-x86_64" %}
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.tar.bz2
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/firefox-{{ version }}.tar.bz2
+ {% endif %}
+
+ {% if platform == "mac" %}
+ package:
+ artifact: {{ artifact_base_url }}/firefox-{{ app_version }}.{{ locale }}.{{ platform }}.dmg
+ s3_key: {{ s3_prefix }}{{ platform }}/{{ locale }}/Firefox {{ version }}.dmg
+ {% endif %}
+
+{% endfor %}
diff --git a/testing/mozharness/configs/beetmover/snap.yml.tmpl b/testing/mozharness/configs/beetmover/snap.yml.tmpl
new file mode 100644
index 000000000..afc8f35ce
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/snap.yml.tmpl
@@ -0,0 +1,11 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for snap iamge"
+ owner: "release@mozilla.com"
+
+mapping:
+ all:
+ snap:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.snap
+ s3_key: {{ s3_prefix }}snap/firefox-{{ version }}.snap
diff --git a/testing/mozharness/configs/beetmover/snap_checksums.yml.tmpl b/testing/mozharness/configs/beetmover/snap_checksums.yml.tmpl
new file mode 100644
index 000000000..aa905d38d
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/snap_checksums.yml.tmpl
@@ -0,0 +1,14 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for snap checksums"
+ owner: "release@mozilla.com"
+
+mapping:
+ all:
+ snap_checksum:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.snap.checksums
+ s3_key: {{ s3_prefix }}snap/firefox-{{ version }}.snap.checksums
+ snap_checksum_asc:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.snap.checksums.asc
+ s3_key: {{ s3_prefix }}snap/firefox-{{ version }}.snap.checksums.asc
diff --git a/testing/mozharness/configs/beetmover/source.yml.tmpl b/testing/mozharness/configs/beetmover/source.yml.tmpl
new file mode 100644
index 000000000..f991f257c
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/source.yml.tmpl
@@ -0,0 +1,14 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for source bundles"
+ owner: "release@mozilla.com"
+
+mapping:
+ all:
+ source_bundle:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.bundle
+ s3_key: {{ s3_prefix }}source/firefox-{{ version }}.bundle
+ source_tar:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.source.tar.xz
+ s3_key: {{ s3_prefix }}source/firefox-{{ version }}.source.tar.xz
diff --git a/testing/mozharness/configs/beetmover/source_checksums.yml.tmpl b/testing/mozharness/configs/beetmover/source_checksums.yml.tmpl
new file mode 100644
index 000000000..0dd228c24
--- /dev/null
+++ b/testing/mozharness/configs/beetmover/source_checksums.yml.tmpl
@@ -0,0 +1,14 @@
+---
+metadata:
+ name: "Beet Mover Manifest"
+ description: "Maps artifact locations to s3 key names for source bundle checksums"
+ owner: "release@mozilla.com"
+
+mapping:
+ all:
+ source_checksum:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.source.checksums
+ s3_key: {{ s3_prefix }}source/firefox-{{ version }}.source.checksums
+ source_checksum_asc:
+ artifact: {{ artifact_base_url }}/firefox-{{ version }}.source.checksums.asc
+ s3_key: {{ s3_prefix }}source/firefox-{{ version }}.source.checksums.asc
diff --git a/testing/mozharness/configs/builds/branch_specifics.py b/testing/mozharness/configs/builds/branch_specifics.py
new file mode 100644
index 000000000..43f14c5ad
--- /dev/null
+++ b/testing/mozharness/configs/builds/branch_specifics.py
@@ -0,0 +1,469 @@
+# This is a dict of branch-specific keys/values. As this fills up and more
+# fx build factories are ported, we might deal with this differently.
+
+# We should be able to port this in-tree and have the respective repos and
+# revisions handle what goes on in here. Tracking: bug 978510
+
+# example config and explanation of how it works:
+# config = {
+# # if a branch matches a key below, override items in self.config with
+# # items in the key's value.
+# # this override can be done for every platform or at a platform level
+# '<branch-name>': {
+# # global config items (applies to all platforms and build types)
+# 'repo_path': "projects/<branch-name>",
+# 'graph_server_branch_name': "Firefox",
+#
+# # platform config items (applies to specific platforms)
+# 'platform_overrides': {
+# # if a platform matches a key below, override items in
+# # self.config with items in the key's value
+# 'linux64-debug': {
+# 'upload_symbols': False,
+# },
+# 'win64': {
+# 'enable_checktests': False,
+# },
+# }
+# },
+# }
+
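A sketch of the override order the example above describes: branch-level keys are layered over the base config first, then the matching `platform_overrides` entry wins. The merge helper and sample keys are illustrative:

```python
# Apply a branch's config over the base, then the platform's overrides.
def apply_branch_config(base_config, branch_config, platform):
    merged = dict(base_config)
    merged.update({k: v for k, v in branch_config.items()
                   if k != 'platform_overrides'})
    merged.update(branch_config.get('platform_overrides', {}).get(platform, {}))
    return merged

base = {'upload_symbols': True, 'enable_checktests': True}
branch = {
    'repo_path': 'projects/my-branch',
    'platform_overrides': {'linux64-debug': {'upload_symbols': False}},
}
print(apply_branch_config(base, branch, 'linux64-debug'))
# {'upload_symbols': False, 'enable_checktests': True,
#  'repo_path': 'projects/my-branch'}
```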
+config = {
+ ### release branches
+ "mozilla-central": {
+ "repo_path": 'mozilla-central',
+ "update_channel": "nightly",
+ "graph_server_branch_name": "Firefox",
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'mozilla-release': {
+ 'enable_release_promotion': True,
+ 'repo_path': 'releases/mozilla-release',
+ 'update_channel': 'release',
+ 'branch_uses_per_checkin_strategy': True,
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ 'platform_overrides': {
+ 'linux': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/release',
+ 'force_clobber': True,
+ },
+ 'linux64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/release',
+ 'force_clobber': True,
+ },
+ 'macosx64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/release',
+ 'force_clobber': True,
+ },
+ 'win32': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/release',
+ 'force_clobber': True,
+ },
+ 'win64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/release',
+ 'force_clobber': True,
+ },
+ 'linux-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-cc': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'linux64-tsan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ 'win32-debug': {
+ 'update_channel': 'default',
+ },
+ 'win32-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ 'win64-debug': {
+ 'update_channel': 'default',
+ },
+ 'win64-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ },
+ },
+ 'mozilla-beta': {
+ 'enable_release_promotion': True,
+ 'repo_path': 'releases/mozilla-beta',
+ 'update_channel': 'beta',
+ 'branch_uses_per_checkin_strategy': True,
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ 'platform_overrides': {
+ 'linux': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/beta',
+ 'force_clobber': True,
+ },
+ 'linux64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/beta',
+ 'force_clobber': True,
+ },
+ 'macosx64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/beta',
+ 'force_clobber': True,
+ },
+ 'win32': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/beta',
+ 'force_clobber': True,
+ },
+ 'win64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/beta',
+ 'force_clobber': True,
+ },
+ 'linux-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-cc': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'linux64-tsan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ 'win32-debug': {
+ 'update_channel': 'default',
+ },
+ 'win32-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ 'win64-debug': {
+ 'update_channel': 'default',
+ },
+ 'win64-add-on-devel': {
+ 'update_channel': 'default',
+ },
+ },
+ },
+ 'mozilla-esr52': {
+ 'enable_release_promotion': True,
+ 'repo_path': 'releases/mozilla-esr52',
+ 'update_channel': 'esr',
+ 'branch_uses_per_checkin_strategy': True,
+ 'use_branch_in_symbols_extra_buildid': False,
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ 'platform_overrides': {
+ 'linux': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/release',
+ 'force_clobber': True,
+ },
+ 'linux64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/release',
+ 'force_clobber': True,
+ },
+ 'macosx64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/release',
+ 'force_clobber': True,
+ },
+ 'win32': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/release',
+ 'force_clobber': True,
+ },
+ 'win64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/release',
+ 'force_clobber': True,
+ },
+ 'linux-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-cc': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'linux64-tsan': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'win32-debug': {
+ 'update_channel': 'default',
+ },
+ 'win64-debug': {
+ 'update_channel': 'default',
+ },
+ },
+ },
+ 'mozilla-aurora': {
+ 'repo_path': 'releases/mozilla-aurora',
+ 'update_channel': 'aurora',
+ 'branch_uses_per_checkin_strategy': True,
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'try': {
+ 'repo_path': 'try',
+ 'clone_by_revision': True,
+ 'clone_with_purge': True,
+ 'tinderbox_build_dir': '%(who)s-%(got_revision)s',
+ 'to_tinderbox_dated': False,
+ 'include_post_upload_builddir': True,
+ 'release_to_try_builds': True,
+ 'stage_server': 'upload.trybld.productdelivery.prod.mozaws.net',
+ 'stage_username': 'trybld',
+ 'stage_ssh_key': 'trybld_dsa',
+ 'branch_supports_uploadsymbols': False,
+ 'use_clobberer': False,
+ },
+
+ ### project branches
+ #'fx-team': {}, #Bug 1296396
+ 'gum': {
+ 'branch_uses_per_checkin_strategy': True,
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'mozilla-inbound': {
+ 'repo_path': 'integration/mozilla-inbound',
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'autoland': {
+ 'repo_path': 'integration/autoland',
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'ux': {
+ "graph_server_branch_name": "UX",
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ # When build promotion goes live, the mozconfig changes are probably better
+ # expressed once in files like configs/builds/releng_base_windows_32_builds.py
+ 'date': {
+ 'update_channel': 'beta-dev',
+ 'enable_release_promotion': True,
+ 'platform_overrides': {
+ 'linux': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/beta',
+ },
+ 'linux-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/beta',
+ },
+ 'linux64-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-cc': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'linux64-tsan': {
+ 'update_channel': 'default',
+ },
+ 'macosx64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/beta',
+ },
+ 'macosx64-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'win32': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/beta',
+ },
+ 'win32-debug': {
+ 'update_channel': 'default',
+ },
+ 'win64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/beta',
+ },
+ 'win64-debug': {
+ 'update_channel': 'default',
+ },
+ },
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'cypress': {
+ # bug 1164935
+ 'branch_uses_per_checkin_strategy': True,
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+
+ ### other branches that do not require anything special:
+ 'alder': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'ash': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'birch': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ # 'build-system': {}
+ 'cedar': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'elm': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'fig': {},
+ 'graphics': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ # 'holly': {},
+ 'jamun': {
+ 'update_channel': 'release-dev',
+ 'enable_release_promotion': True,
+ 'platform_overrides': {
+ 'linux': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/release',
+ },
+ 'linux-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/release',
+ },
+ 'linux64-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-asan': {
+ 'update_channel': 'default',
+ },
+ 'linux64-cc': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'linux64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'linux64-tsan': {
+ 'update_channel': 'default',
+ },
+ 'macosx64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/release',
+ },
+ 'macosx64-debug': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an': {
+ 'update_channel': 'default',
+ },
+ 'macosx64-st-an-debug': {
+ 'update_channel': 'default',
+ },
+ 'win32': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/release',
+ },
+ 'win32-debug': {
+ 'update_channel': 'default',
+ },
+ 'win64': {
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/release',
+ },
+ 'win64-debug': {
+ 'update_channel': 'default',
+ },
+ },
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'larch': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ # 'maple': {},
+ 'oak': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+ 'pine': {
+ 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ },
+}
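The platform_overrides blocks above layer per-platform keys on top of the branch-level defaults, so e.g. a debug platform can stay on the 'default' update channel while its branch ships 'esr'. A minimal sketch of that merge order, assuming a plain dict update (apply_platform_overrides is illustrative, not the mozharness API):

def apply_platform_overrides(branch_config, platform):
    # illustrative helper: platform override keys win over branch-level keys
    merged = dict((k, v) for k, v in branch_config.items()
                  if k != 'platform_overrides')
    overrides = branch_config.get('platform_overrides', {})
    merged.update(overrides.get(platform, {}))
    return merged

esr52 = {
    'update_channel': 'esr',
    'platform_overrides': {
        'linux64-debug': {'update_channel': 'default'},
    },
}
assert apply_platform_overrides(esr52, 'linux64-debug')['update_channel'] == 'default'
assert apply_platform_overrides(esr52, 'linux64')['update_channel'] == 'esr'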
diff --git a/testing/mozharness/configs/builds/build_pool_specifics.py b/testing/mozharness/configs/builds/build_pool_specifics.py
new file mode 100644
index 000000000..8559b48b7
--- /dev/null
+++ b/testing/mozharness/configs/builds/build_pool_specifics.py
@@ -0,0 +1,44 @@
+# This is a dict of pool-specific keys/values. As this fills up and more
+# fx build factories are ported, we might deal with this differently.
+
+config = {
+ "staging": {
+ # if clobberer_url is not set, only 'abs_work_dir' is clobbered;
+ # if it is set, the clobberer service may trigger a clobber.
+ # See PurgeMixin for the clobber() conditions.
+ 'clobberer_url': 'https://api-pub-build.allizom.org/clobberer/lastclobber',
+ # in staging we should use MozillaTest,
+ # but in production we let self.branch decide via
+ # self._query_graph_server_branch_name()
+ "graph_server_branch_name": "MozillaTest",
+ 'graph_server': 'graphs.allizom.org',
+ 'stage_server': 'upload.ffxbld.productdelivery.stage.mozaws.net',
+ "sendchange_masters": ["dev-master1.srv.releng.scl3.mozilla.com:9038"],
+ 'taskcluster_index': 'index.garbage.staging',
+ 'post_upload_extra': ['--bucket-prefix', 'net-mozaws-stage-delivery',
+ '--url-prefix', 'http://ftp.stage.mozaws.net/',
+ ],
+ },
+ "production": {
+ # if clobberer_url is not set, only 'abs_work_dir' is clobbered;
+ # if it is set, the clobberer service may trigger a clobber.
+ # See PurgeMixin for the clobber() conditions.
+ 'clobberer_url': 'https://api.pub.build.mozilla.org/clobberer/lastclobber',
+ 'graph_server': 'graphs.mozilla.org',
+ # bug 1216907, set this at branch level
+ # 'stage_server': 'upload.ffxbld.productdelivery.prod.mozaws.net',
+ "sendchange_masters": ["buildbot-master81.build.mozilla.org:9301"],
+ 'taskcluster_index': 'index',
+ },
+ "taskcluster": {
+ 'graph_server': 'graphs.mozilla.org',
+ 'stage_server': 'ignored',
+ # use the relengapi proxy to talk to tooltool
+ "tooltool_servers": ['http://relengapi/tooltool/'],
+ "tooltool_url": 'http://relengapi/tooltool/',
+ 'upload_env': {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': '/home/worker/artifacts',
+ },
+ },
+}
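The clobberer comments above compress a two-way decision: with no clobberer_url only the working directory is purged, and with one set the clobberer service decides whether a full clobber is due. A rough sketch of that branching with stub callables (the real conditions live in PurgeMixin's clobber(); the helper names here are hypothetical):

def clobber(pool_config, purge_work_dir, query_clobberer):
    if not pool_config.get('clobberer_url'):
        # no clobberer service configured: only clobber abs_work_dir
        purge_work_dir()
    else:
        # clobberer configured: it may request a full clobber
        query_clobberer(pool_config['clobberer_url'])

log = []
clobber({'clobberer_url': None},
        purge_work_dir=lambda: log.append('purge abs_work_dir'),
        query_clobberer=lambda url: log.append('ask %s' % url))
assert log == ['purge abs_work_dir']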
diff --git a/testing/mozharness/configs/builds/releng_base_android_64_builds.py b/testing/mozharness/configs/builds/releng_base_android_64_builds.py
new file mode 100644
index 000000000..0ffd929c3
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_android_64_builds.py
@@ -0,0 +1,111 @@
+import os
+
+config = {
+ #########################################################################
+ ######## ANDROID GENERIC CONFIG KEYS/VALUES
+
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'multi-l10n',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ # mock stuff
+ 'mock_mozilla_dir': '/builds/mock_mozilla',
+ 'mock_target': 'mozilla-centos6-x86_64-android',
+ 'mock_files': [
+ ('/home/cltbld/.ssh', '/home/mock_mozilla/.ssh'),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/home/cltbld/.boto', '/builds/.boto'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/builds/mozilla-api.key', '/builds/mozilla-api.key'),
+ ('/builds/mozilla-fennec-geoloc-api.key', '/builds/mozilla-fennec-geoloc-api.key'),
+ ('/builds/crash-stats-api.token', '/builds/crash-stats-api.token'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+ 'secret_files': [
+ {'filename': '/builds/mozilla-fennec-geoloc-api.key',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/mozilla-fennec-geoloc-api.key',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/adjust-sdk.token',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/adjust-sdk.token',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/adjust-sdk-beta.token',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/adjust-sdk-beta.token',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ ],
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ 'multi_locale': True,
+ #########################################################################
+
+
+ #########################################################################
+ 'base_name': 'Android 2.3 %(branch)s',
+ 'platform': 'android',
+ 'stage_platform': 'android',
+ 'stage_product': 'mobile',
+ 'publish_nightly_en_US_routes': True,
+ 'post_upload_include_platform': True,
+ 'enable_max_vsize': False,
+ 'use_package_as_marfile': True,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/bin:/usr/bin',
+ 'SHIP_LICENSED_FONTS': '1',
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/home/mock_mozilla/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/linux/minidump_stackwalk',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'mock_packages': ['autoconf213', 'mozilla-python27-mercurial', 'yasm',
+ 'ccache', 'zip', "gcc472_0moz1", "gcc473_0moz1",
+ 'java-1.7.0-openjdk-devel', 'zlib-devel',
+ 'glibc-static', 'openssh-clients', 'mpfr',
+ 'wget', 'glibc.i686', 'libstdc++.i686',
+ 'zlib.i686', 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'ant', 'ant-apache-regexp'
+ ],
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android/nightly',
+ 'tooltool_manifest_src': "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ #########################################################################
+}
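Values like '%(stage_server)s' in upload_env are old-style format strings that mozharness fills in from the merged config at upload time; stage_server itself comes from the pool or branch config, as the comment notes. A sketch of that substitution, using the try branch's values from branch_specifics.py above:

upload_env = {
    'UPLOAD_HOST': '%(stage_server)s',
    'UPLOAD_USER': '%(stage_username)s',
    'UPLOAD_SSH_KEY': '/home/mock_mozilla/.ssh/%(stage_ssh_key)s',
    'UPLOAD_TO_TEMP': '1',
}
replacements = {
    'stage_server': 'upload.trybld.productdelivery.prod.mozaws.net',
    'stage_username': 'trybld',
    'stage_ssh_key': 'trybld_dsa',
}
resolved = dict((k, v % replacements) for k, v in upload_env.items())
assert resolved['UPLOAD_SSH_KEY'] == '/home/mock_mozilla/.ssh/trybld_dsa'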
diff --git a/testing/mozharness/configs/builds/releng_base_linux_32_builds.py b/testing/mozharness/configs/builds/releng_base_linux_32_builds.py
new file mode 100644
index 000000000..393cf8983
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_linux_32_builds.py
@@ -0,0 +1,160 @@
+import os
+
+config = {
+ #########################################################################
+ ######## LINUX GENERIC CONFIG KEYS/VALUES
+ # if you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_linux_64_builds.py
+
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ # mock stuff
+ 'mock_mozilla_dir': '/builds/mock_mozilla',
+ 'mock_target': 'mozilla-centos6-x86_64',
+ 'mock_files': [
+ ('/home/cltbld/.ssh', '/home/mock_mozilla/.ssh'),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/home/cltbld/.boto', '/builds/.boto'),
+ ('/builds/gapi.data', '/builds/gapi.data'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/builds/mozilla-desktop-geoloc-api.key', '/builds/mozilla-desktop-geoloc-api.key'),
+ ('/builds/crash-stats-api.token', '/builds/crash-stats-api.token'),
+ ('/builds/adjust-sdk.token', '/builds/adjust-sdk.token'),
+ ('/builds/adjust-sdk-beta.token', '/builds/adjust-sdk-beta.token'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+ 'secret_files': [
+ {'filename': '/builds/gapi.data',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/gapi.data',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/mozilla-desktop-geoloc-api.key',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/mozilla-desktop-geoloc-api.key',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/adjust-sdk.token',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/adjust-sdk.token',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/adjust-sdk-beta.token',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/adjust-sdk-beta.token',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ ],
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'Linux_%(branch)s',
+ 'platform': 'linux',
+ 'stage_platform': 'linux',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ # 32 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib/ccache:\
+/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:\
+/tools/python27/bin:/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib",
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/home/mock_mozilla/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/linux/minidump_stackwalk',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind',
+ ######## 32 bit specific ###########
+ 'glibc-static.i686', 'libstdc++-static.i686',
+ 'gtk2-devel.i686', 'libnotify-devel.i686',
+ 'alsa-lib-devel.i686', 'libcurl-devel.i686',
+ 'wireless-tools-devel.i686', 'libX11-devel.i686',
+ 'libXt-devel.i686', 'mesa-libGL-devel.i686',
+ 'gnome-vfs2-devel.i686', 'GConf2-devel.i686',
+ 'pulseaudio-libs-devel.i686',
+ 'gstreamer-devel.i686', 'gstreamer-plugins-base-devel.i686',
+ # Packages already installed in the mock environment, as x86_64
+ # packages.
+ 'glibc-devel.i686', 'libgcc.i686', 'libstdc++-devel.i686',
+ # yum likes to install .x86_64 -devel packages that satisfy .i686
+ # -devel packages' dependencies, so manually install the dependencies
+ # of the above packages.
+ 'ORBit2-devel.i686', 'atk-devel.i686', 'cairo-devel.i686',
+ 'check-devel.i686', 'dbus-devel.i686', 'dbus-glib-devel.i686',
+ 'fontconfig-devel.i686', 'glib2-devel.i686',
+ 'hal-devel.i686', 'libICE-devel.i686', 'libIDL-devel.i686',
+ 'libSM-devel.i686', 'libXau-devel.i686', 'libXcomposite-devel.i686',
+ 'libXcursor-devel.i686', 'libXdamage-devel.i686',
+ 'libXdmcp-devel.i686', 'libXext-devel.i686', 'libXfixes-devel.i686',
+ 'libXft-devel.i686', 'libXi-devel.i686', 'libXinerama-devel.i686',
+ 'libXrandr-devel.i686', 'libXrender-devel.i686',
+ 'libXxf86vm-devel.i686', 'libdrm-devel.i686', 'libidn-devel.i686',
+ 'libpng-devel.i686', 'libxcb-devel.i686', 'libxml2-devel.i686',
+ 'pango-devel.i686', 'perl-devel.i686', 'pixman-devel.i686',
+ 'zlib-devel.i686',
+ # Freetype packages need to be installed by version, because a newer
+ # version is available, but we don't want it for Firefox builds.
+ 'freetype-2.3.11-6.el6_1.8.i686',
+ 'freetype-devel-2.3.11-6.el6_1.8.i686',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ ######## 32 bit specific ###########
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/nightly',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux32/\
+releng.manifest",
+ #########################################################################
+}
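Each secret_files entry above names a TaskCluster secret parameterized by SCM level, with a placeholder default for trees below min_scm_level (e.g. try). A sketch of how one entry might resolve, with the actual secrets-service fetch stubbed out (materialize_secret is hypothetical, not mozharness code):

def materialize_secret(entry, scm_level, fetch):
    if scm_level < entry['min_scm_level']:
        # low-privilege trees get the harmless placeholder
        return entry['default']
    return fetch(entry['secret_name'] % {'scm-level': scm_level})

entry = {
    'filename': '/builds/gapi.data',
    'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/gapi.data',
    'min_scm_level': 2, 'default': 'try-build-has-no-secrets',
}
assert materialize_secret(entry, 1, fetch=None) == 'try-build-has-no-secrets'
assert (materialize_secret(entry, 3, fetch=lambda name: name)
        == 'project/releng/gecko/build/level-3/gapi.data')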
diff --git a/testing/mozharness/configs/builds/releng_base_linux_64_builds.py b/testing/mozharness/configs/builds/releng_base_linux_64_builds.py
new file mode 100644
index 000000000..fe04b73b5
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_linux_64_builds.py
@@ -0,0 +1,139 @@
+import os
+
+config = {
+ #########################################################################
+ ######## LINUX GENERIC CONFIG KEYS/VALUES
+ # if you are updating this with custom 64 bit keys/values, please add them
+ # below under the '64 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_linux_32_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ # mock stuff
+ 'mock_mozilla_dir': '/builds/mock_mozilla',
+ 'mock_target': 'mozilla-centos6-x86_64',
+ 'mock_files': [
+ ('/home/cltbld/.ssh', '/home/mock_mozilla/.ssh'),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/home/cltbld/.boto', '/builds/.boto'),
+ ('/builds/gapi.data', '/builds/gapi.data'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/builds/mozilla-desktop-geoloc-api.key', '/builds/mozilla-desktop-geoloc-api.key'),
+ ('/builds/crash-stats-api.token', '/builds/crash-stats-api.token'),
+ ('/builds/adjust-sdk.token', '/builds/adjust-sdk.token'),
+ ('/builds/adjust-sdk-beta.token', '/builds/adjust-sdk-beta.token'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+ 'secret_files': [
+ {'filename': '/builds/gapi.data',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/gapi.data',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/mozilla-desktop-geoloc-api.key',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/mozilla-desktop-geoloc-api.key',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/adjust-sdk.token',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/adjust-sdk.token',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/adjust-sdk-beta.token',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/adjust-sdk-beta.token',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ ],
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'Linux_x86-64_%(branch)s',
+ 'platform': 'linux64',
+ 'stage_platform': 'linux64',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib64",
+ ##
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/home/mock_mozilla/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/linux64/minidump_stackwalk',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind', 'dbus-x11',
+ ######## 64 bit specific ###########
+ 'glibc-static', 'libstdc++-static',
+ 'gtk2-devel', 'libnotify-devel',
+ 'alsa-lib-devel', 'libcurl-devel', 'wireless-tools-devel',
+ 'libX11-devel', 'libXt-devel', 'mesa-libGL-devel', 'gnome-vfs2-devel',
+ 'GConf2-devel',
+ ### from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'pulseaudio-libs-devel', 'gstreamer-devel',
+ 'gstreamer-plugins-base-devel', 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64'
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/nightly',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+releng.manifest",
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_base_mac_64_builds.py b/testing/mozharness/configs/builds/releng_base_mac_64_builds.py
new file mode 100644
index 000000000..e6e338ada
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_mac_64_builds.py
@@ -0,0 +1,79 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## MACOSX GENERIC CONFIG KEYS/VALUES
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ # 'setup-mock',
+ 'checkout-sources',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox/x86_64',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'OS X 10.7 %(branch)s',
+ 'platform': 'macosx64',
+ 'stage_platform': 'macosx64',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'CHOWN_ROOT': '~/bin/chown_root',
+ 'CHOWN_REVERT': '~/bin/chown_revert',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/Users/cltbld/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/osx64/minidump_stackwalk',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/nightly',
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/macosx64/releng.manifest',
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py b/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
new file mode 100644
index 000000000..47738e1ce
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
@@ -0,0 +1,83 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## MACOSX CROSS GENERIC CONFIG KEYS/VALUES
+
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'build',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'secret_files': [
+ {'filename': '/builds/gapi.data',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/gapi.data',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/mozilla-desktop-geoloc-api.key',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/mozilla-desktop-geoloc-api.key',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ ],
+ 'enable_ccache': True,
+ 'enable_check_test': False,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox/',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'OS X 10.7 %(branch)s',
+ 'platform': 'macosx64',
+ 'stage_platform': 'macosx64',
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ ##
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/Users/cltbld/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/linux64/minidump_stackwalk',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/nightly',
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/macosx64/cross-releng.manifest',
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_base_windows_32_builds.py b/testing/mozharness/configs/builds/releng_base_windows_32_builds.py
new file mode 100644
index 000000000..0a6708a1f
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_windows_32_builds.py
@@ -0,0 +1,95 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # if you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_windows_64_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": [
+ sys.executable,
+ 'c:\\mozilla-build\\buildbotve\\scripts\\buildbot'
+ ],
+ "make": [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': False,
+ 'vcs_share_base': 'C:/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'WINNT_5.2_%(branch)s',
+ 'platform': 'win32',
+ 'stage_platform': 'win32',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/8.0/Debuggers/x64/srcsrv/pdbstr.exe',
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/c/Users/cltbld/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/win32/minidump_stackwalk.exe',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/nightly',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win32/releng.manifest",
+ #########################################################################
+}
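Unlike the Linux configs, the Windows 'exes' entries are argv lists (sys.executable plus a script) rather than bare executable paths. A small sketch of how a caller might normalize either shape into a command list (build_command is hypothetical, not the harness's API):

def build_command(exe, extra_args):
    # 'exes' values may be a path string or an argv list
    cmd = list(exe) if isinstance(exe, list) else [exe]
    return cmd + list(extra_args)

assert (build_command(['python', 'build/pymake/make.py'], ['-j4'])
        == ['python', 'build/pymake/make.py', '-j4'])
assert (build_command('/tools/buildbot/bin/buildbot', ['--version'])
        == ['/tools/buildbot/bin/buildbot', '--version'])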
diff --git a/testing/mozharness/configs/builds/releng_base_windows_64_builds.py b/testing/mozharness/configs/builds/releng_base_windows_64_builds.py
new file mode 100644
index 000000000..ab12fc982
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_base_windows_64_builds.py
@@ -0,0 +1,93 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # if you are updating this with custom 64 bit keys/values, please add them
+ # below under the '64 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_windows_32_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": [
+ sys.executable,
+ 'c:\\mozilla-build\\buildbotve\\scripts\\buildbot'
+ ],
+ "make": [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': False,
+ 'vcs_share_base': 'C:/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': True,
+ 'enable_unittest_sendchange': True,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'WINNT_6.1_x86-64_%(branch)s',
+ 'platform': 'win64',
+ 'stage_platform': 'win64',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/8.0/Debuggers/x64/srcsrv/pdbstr.exe',
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'upload_env': {
+ # stage_server is dictated by build_pool_specifics.py
+ 'UPLOAD_HOST': '%(stage_server)s',
+ 'UPLOAD_USER': '%(stage_username)s',
+ 'UPLOAD_SSH_KEY': '/c/Users/cltbld/.ssh/%(stage_ssh_key)s',
+ 'UPLOAD_TO_TEMP': '1',
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/win64/minidump_stackwalk.exe',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/nightly',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win64/releng.manifest",
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15.py
new file mode 100644
index 000000000..f25060340
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15.py
@@ -0,0 +1,8 @@
+config = {
+ 'base_name': 'Android armv7 API 15+ %(branch)s',
+ 'stage_platform': 'android-api-15',
+ 'build_type': 'api-15-opt',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_debug.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_debug.py
new file mode 100644
index 000000000..22787e7f9
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_debug.py
@@ -0,0 +1,9 @@
+config = {
+ 'base_name': 'Android armv7 API 15+ %(branch)s debug',
+ 'stage_platform': 'android-api-15-debug',
+ 'build_type': 'api-15-debug',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15/debug',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+ 'debug_build': True,
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle.py
new file mode 100644
index 000000000..7c03fc1dc
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle.py
@@ -0,0 +1,18 @@
+config = {
+ 'base_name': 'Android armv7 API 15+ %(branch)s Gradle',
+ 'stage_platform': 'android-api-15-gradle',
+ 'build_type': 'api-15-gradle',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15-gradle/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+ # It's not obvious, but postflight_build is after packaging, so the Gecko
+ # binaries are in the object directory, ready to be packaged into the
+ # GeckoView AAR.
+ 'postflight_build_mach_commands': [
+ ['gradle',
+ 'geckoview:assembleWithGeckoBinaries',
+ 'geckoview_example:assembleWithGeckoBinaries',
+ 'uploadArchives',
+ ],
+ ],
+}
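postflight_build_mach_commands entries are mach argument lists run after packaging, which is why the Gecko binaries are already in the object directory when the Gradle tasks fire. A sketch of the dispatch loop, assuming each list is handed straight to ./mach (run_postflight_mach_commands is illustrative, not the harness's real method):

import subprocess

def run_postflight_mach_commands(config, topsrcdir):
    for command in config.get('postflight_build_mach_commands', []):
        # e.g. ['gradle', 'geckoview:assembleWithGeckoBinaries', ...]
        subprocess.check_call(['./mach'] + list(command), cwd=topsrcdir)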
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle_dependencies.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle_dependencies.py
new file mode 100644
index 000000000..c8bee2562
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_gradle_dependencies.py
@@ -0,0 +1,21 @@
+config = {
+ 'base_name': 'Android armv7 API 15+ Gradle dependencies %(branch)s',
+ 'stage_platform': 'android-api-15-gradle-dependencies',
+ 'build_type': 'api-15-opt',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15-gradle-dependencies/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android-gradle-dependencies/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+ 'postflight_build_mach_commands': [
+ ['gradle',
+ 'assembleAutomationRelease',
+ 'assembleAutomationDebug',
+ 'assembleAutomationDebugAndroidTest',
+ 'checkstyle',
+ # Does not include Gecko binaries -- see mobile/android/gradle/with_gecko_binaries.gradle.
+ 'geckoview:assembleWithoutGeckoBinaries',
+ # So that we pick up the test dependencies for the builders.
+ 'geckoview_example:assembleWithoutGeckoBinaries',
+ 'geckoview_example:assembleWithoutGeckoBinariesAndroidTest',
+ ],
+ ],
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_partner_sample1.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_partner_sample1.py
new file mode 100644
index 000000000..d2e03f78c
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_15_partner_sample1.py
@@ -0,0 +1,9 @@
+config = {
+ 'base_name': 'Android armv7 API 15+ partner Sample1 %(branch)s',
+ 'stage_platform': 'android-api-15-partner-sample1',
+ 'build_type': 'api-15-partner-sample1-opt',
+ 'src_mozconfig': None, # use manifest to determine mozconfig src
+ 'src_mozconfig_manifest': 'partner/mozconfigs/mozconfig1.json',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_checkstyle.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_checkstyle.py
new file mode 100644
index 000000000..6643bcb1b
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_checkstyle.py
@@ -0,0 +1,11 @@
+config = {
+ 'base_name': 'Android checkstyle %(branch)s',
+ 'stage_platform': 'android-checkstyle',
+ 'build_type': 'api-15-opt',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15-frontend/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android-frontend/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+ 'postflight_build_mach_commands': [
+ ['gradle', 'app:checkstyle'],
+ ],
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_lint.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_lint.py
new file mode 100644
index 000000000..f377d416c
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_lint.py
@@ -0,0 +1,11 @@
+config = {
+ 'base_name': 'Android lint %(branch)s',
+ 'stage_platform': 'android-lint',
+ 'build_type': 'api-15-opt',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15-frontend/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android-frontend/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+ 'postflight_build_mach_commands': [
+ ['gradle', 'app:lintAutomationDebug'],
+ ],
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_test.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_test.py
new file mode 100644
index 000000000..3e1a1492f
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_test.py
@@ -0,0 +1,11 @@
+config = {
+ 'base_name': 'Android armv7 unit tests %(branch)s',
+ 'stage_platform': 'android-test',
+ 'build_type': 'api-15-opt',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-15-frontend/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android-frontend/releng.manifest',
+ 'multi_locale_config_platform': 'android',
+ 'postflight_build_mach_commands': [
+ ['gradle', 'app:testAutomationDebugUnitTest'],
+ ],
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_x86.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_x86.py
new file mode 100644
index 000000000..288f0d65d
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_android_configs/64_x86.py
@@ -0,0 +1,8 @@
+config = {
+ 'base_name': 'Android 4.2 x86 %(branch)s',
+ 'stage_platform': 'android-x86',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'x86-opt',
+ 'src_mozconfig': 'mobile/android/config/mozconfigs/android-x86/nightly',
+ 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android-x86/releng.manifest',
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/32_artifact.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/32_artifact.py
new file mode 100644
index 000000000..f016d5606
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/32_artifact.py
@@ -0,0 +1,116 @@
+import os
+
+config = {
+ #########################################################################
+ ######## LINUX GENERIC CONFIG KEYS/VALUES
+ # if you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_linux_64_builds.py
+
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'Linux_%(branch)s_Artifact_build',
+ 'platform': 'linux',
+ 'stage_platform': 'linux',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ # 32 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib/ccache:\
+/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:\
+/tools/python27/bin:/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib",
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind',
+ ######## 32 bit specific ###########
+ 'glibc-static.i686', 'libstdc++-static.i686',
+ 'gtk2-devel.i686', 'libnotify-devel.i686',
+ 'alsa-lib-devel.i686', 'libcurl-devel.i686',
+ 'wireless-tools-devel.i686', 'libX11-devel.i686',
+ 'libXt-devel.i686', 'mesa-libGL-devel.i686',
+ 'gnome-vfs2-devel.i686', 'GConf2-devel.i686',
+ 'pulseaudio-libs-devel.i686',
+ 'gstreamer-devel.i686', 'gstreamer-plugins-base-devel.i686',
+ # Packages already installed in the mock environment, as x86_64
+ # packages.
+ 'glibc-devel.i686', 'libgcc.i686', 'libstdc++-devel.i686',
+ # yum likes to install .x86_64 -devel packages that satisfy .i686
+ # -devel packages' dependencies, so manually install the dependencies
+ # of the above packages.
+ 'ORBit2-devel.i686', 'atk-devel.i686', 'cairo-devel.i686',
+ 'check-devel.i686', 'dbus-devel.i686', 'dbus-glib-devel.i686',
+ 'fontconfig-devel.i686', 'glib2-devel.i686',
+ 'hal-devel.i686', 'libICE-devel.i686', 'libIDL-devel.i686',
+ 'libSM-devel.i686', 'libXau-devel.i686', 'libXcomposite-devel.i686',
+ 'libXcursor-devel.i686', 'libXdamage-devel.i686',
+ 'libXdmcp-devel.i686', 'libXext-devel.i686', 'libXfixes-devel.i686',
+ 'libXft-devel.i686', 'libXi-devel.i686', 'libXinerama-devel.i686',
+ 'libXrandr-devel.i686', 'libXrender-devel.i686',
+ 'libXxf86vm-devel.i686', 'libdrm-devel.i686', 'libidn-devel.i686',
+ 'libpng-devel.i686', 'libxcb-devel.i686', 'libxml2-devel.i686',
+ 'pango-devel.i686', 'perl-devel.i686', 'pixman-devel.i686',
+ 'zlib-devel.i686',
+ # Freetype packages need to be installed by version, because a newer
+ # version is available, but we don't want it for Firefox builds.
+ 'freetype-2.3.11-6.el6_1.8.i686',
+ 'freetype-devel-2.3.11-6.el6_1.8.i686',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ ######## 32 bit specific ###########
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux32/\
+releng.manifest",
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug.py
new file mode 100644
index 000000000..914bfdfe3
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug.py
@@ -0,0 +1,45 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'debug_build': True,
+ 'stage_platform': 'linux-debug',
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 32 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ # 32 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': '/tools/gcc-4.3.3/installed/lib:\
+%s/dist/bin' % (MOZ_OBJDIR,),
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'TINDERBOX_OUTPUT': '1',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/debug',
+ #######################
+}
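Because the debug config reuses the MOZ_OBJDIR constant, LD_LIBRARY_PATH picks up the objdir's dist/bin alongside the gcc runtime. A quick check of the interpolated value:

MOZ_OBJDIR = 'obj-firefox'
ld_library_path = '/tools/gcc-4.3.3/installed/lib:%s/dist/bin' % (MOZ_OBJDIR,)
assert ld_library_path == '/tools/gcc-4.3.3/installed/lib:obj-firefox/dist/bin'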
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug_artifact.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug_artifact.py
new file mode 100644
index 000000000..88ff8450a
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/32_debug_artifact.py
@@ -0,0 +1,122 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ #########################################################################
+ ######## LINUX GENERIC CONFIG KEYS/VALUES
+ # if you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_linux_64_builds.py
+
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': MOZ_OBJDIR,
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ # debug specific
+ 'debug_build': True,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'Linux_%(branch)s_Artifact_build',
+ 'platform': 'linux',
+ 'stage_platform': 'linux-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ # debug-specific
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ # 32 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib/ccache:\
+/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:\
+/tools/python27/bin:/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib",
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind',
+ ######## 32 bit specific ###########
+ 'glibc-static.i686', 'libstdc++-static.i686',
+ 'gtk2-devel.i686', 'libnotify-devel.i686',
+ 'alsa-lib-devel.i686', 'libcurl-devel.i686',
+ 'wireless-tools-devel.i686', 'libX11-devel.i686',
+ 'libXt-devel.i686', 'mesa-libGL-devel.i686',
+ 'gnome-vfs2-devel.i686', 'GConf2-devel.i686',
+ 'pulseaudio-libs-devel.i686',
+ 'gstreamer-devel.i686', 'gstreamer-plugins-base-devel.i686',
+ # Packages already installed in the mock environment, as x86_64
+ # packages.
+ 'glibc-devel.i686', 'libgcc.i686', 'libstdc++-devel.i686',
+ # yum likes to install .x86_64 -devel packages that satisfy .i686
+ # -devel packages' dependencies, so manually install the dependencies
+ # of the above packages.
+ 'ORBit2-devel.i686', 'atk-devel.i686', 'cairo-devel.i686',
+ 'check-devel.i686', 'dbus-devel.i686', 'dbus-glib-devel.i686',
+ 'fontconfig-devel.i686', 'glib2-devel.i686',
+ 'hal-devel.i686', 'libICE-devel.i686', 'libIDL-devel.i686',
+ 'libSM-devel.i686', 'libXau-devel.i686', 'libXcomposite-devel.i686',
+ 'libXcursor-devel.i686', 'libXdamage-devel.i686',
+ 'libXdmcp-devel.i686', 'libXext-devel.i686', 'libXfixes-devel.i686',
+ 'libXft-devel.i686', 'libXi-devel.i686', 'libXinerama-devel.i686',
+ 'libXrandr-devel.i686', 'libXrender-devel.i686',
+ 'libXxf86vm-devel.i686', 'libdrm-devel.i686', 'libidn-devel.i686',
+ 'libpng-devel.i686', 'libxcb-devel.i686', 'libxml2-devel.i686',
+ 'pango-devel.i686', 'perl-devel.i686', 'pixman-devel.i686',
+ 'zlib-devel.i686',
+ # Freetype packages need to be installed by version, because a newer
+ # version is available, but we don't want it for Firefox builds.
+ 'freetype-2.3.11-6.el6_1.8.i686',
+ 'freetype-devel-2.3.11-6.el6_1.8.i686',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ ######## 32 bit specific ###########
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux32/debug-artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux32/\
+releng.manifest",
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
new file mode 100644
index 000000000..98462a62f
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
@@ -0,0 +1,43 @@
+import os
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+# 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ # 'update',
+ ],
+ 'stage_platform': 'linux64-add-on-devel',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'add-on-devel',
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/home/worker/workspace/build/src/gcc/bin:/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/add-on-devel',
+ #######################
+}
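A note on the long 'PATH' values used throughout these configs: a backslash at
the end of a line inside a Python string literal continues the literal onto the
next line without embedding a newline, which is why the continuation lines are
flushed to column zero (any indentation would end up inside the string). A
minimal sketch of the equivalence, illustrative only and not part of this
commit:

    # The continued spelling and the single-line spelling yield the same string.
    continued = '/tools/buildbot/bin:/usr/local/bin:\
/usr/bin'
    single_line = '/tools/buildbot/bin:/usr/local/bin:/usr/bin'
    assert continued == single_line  # the backslash swallows the newline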
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
new file mode 100644
index 000000000..5cbc70ade
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
@@ -0,0 +1,98 @@
+import os
+
+config = {
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'sendchange',
+ # 'generate-build-stats',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'secret_files': [
+ {'filename': '/builds/gapi.data',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/gapi.data',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ {'filename': '/builds/mozilla-desktop-geoloc-api.key',
+ 'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/mozilla-desktop-geoloc-api.key',
+ 'min_scm_level': 2, 'default': 'try-build-has-no-secrets'},
+ ],
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'Linux_x86-64_%(branch)s_Artifact_build',
+ 'platform': 'linux64',
+ 'stage_platform': 'linux64',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib64",
+ ##
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind', 'dbus-x11',
+ ######## 64 bit specific ###########
+ 'glibc-static', 'libstdc++-static',
+ 'gtk2-devel', 'libnotify-devel',
+ 'alsa-lib-devel', 'libcurl-devel', 'wireless-tools-devel',
+ 'libX11-devel', 'libXt-devel', 'mesa-libGL-devel', 'gnome-vfs2-devel',
+ 'GConf2-devel',
+ ### from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'pulseaudio-libs-devel', 'gstreamer-devel',
+ 'gstreamer-plugins-base-devel', 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64'
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+releng.manifest",
+ #######################
+}
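The 'secret_files' entries above pair a '%(scm-level)s' placeholder with a
'min_scm_level' and a 'default'. A minimal sketch of how one such entry could
be resolved, assuming a hypothetical fetch_secret() callable (the real lookup
is performed by the build machinery against the secrets service):

    def resolve_secret(entry, scm_level, fetch_secret):
        # Trees below the required SCM level (e.g. try) get the dummy default.
        if scm_level < entry['min_scm_level']:
            content = entry['default']
        else:
            # Fill in the deferred %(scm-level)s placeholder, then fetch.
            name = entry['secret_name'] % {'scm-level': scm_level}
            content = fetch_secret(name)
        with open(entry['filename'], 'w') as f:
            f.write(content)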
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan.py
new file mode 100644
index 000000000..0f57520b5
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan.py
@@ -0,0 +1,48 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ # 'update',
+ ],
+ 'stage_platform': 'linux64-asan',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'asan',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+asan.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/nightly-asan',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_and_debug.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_and_debug.py
new file mode 100644
index 000000000..4ff6a9d2c
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_and_debug.py
@@ -0,0 +1,49 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ # 'update',
+ ],
+ 'stage_platform': 'linux64-asan-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'asan-debug',
+ 'debug_build': True,
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+asan.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/debug-asan',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc.py
new file mode 100644
index 000000000..0f57520b5
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc.py
@@ -0,0 +1,48 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ # 'update',
+ ],
+ 'stage_platform': 'linux64-asan',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'asan',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+asan.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/nightly-asan',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc_and_debug.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc_and_debug.py
new file mode 100644
index 000000000..4ff6a9d2c
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_asan_tc_and_debug.py
@@ -0,0 +1,49 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ # 'update',
+ ],
+ 'stage_platform': 'linux64-asan-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'asan-debug',
+ 'debug_build': True,
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+asan.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/debug-asan',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_code_coverage.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_code_coverage.py
new file mode 100644
index 000000000..3ab4f25a3
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_code_coverage.py
@@ -0,0 +1,45 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'linux64-ccov',
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ 'enable_count_ctors': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/code-coverage',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug.py
new file mode 100644
index 000000000..e97c82fcd
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug.py
@@ -0,0 +1,45 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'linux64-debug',
+ 'debug_build': True,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ # 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': '/tools/gcc-4.3.3/installed/lib64:\
+%s/dist/bin' % (MOZ_OBJDIR,),
+ 'TINDERBOX_OUTPUT': '1',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/debug',
+ #######################
+}
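The debug config defines MOZ_OBJDIR once at module level and splices it into
LD_LIBRARY_PATH with old-style %-formatting, so the objdir name only has to
change in one place. Illustrative only:

    MOZ_OBJDIR = 'obj-firefox'
    ld_library_path = '/tools/gcc-4.3.3/installed/lib64:\
%s/dist/bin' % (MOZ_OBJDIR,)
    assert ld_library_path == '/tools/gcc-4.3.3/installed/lib64:obj-firefox/dist/bin'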
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug_artifact.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug_artifact.py
new file mode 100644
index 000000000..d3a82e476
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_debug_artifact.py
@@ -0,0 +1,96 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'sendchange',
+ # 'generate-build-stats',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': MOZ_OBJDIR,
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ # debug specific
+ 'debug_build': True,
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'Linux_x86-64_%(branch)s_Artifact_build',
+ 'platform': 'linux64',
+ 'stage_platform': 'linux64-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ # debug-specific
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib64",
+ ##
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind', 'dbus-x11',
+ ######## 64 bit specific ###########
+ 'glibc-static', 'libstdc++-static',
+ 'gtk2-devel', 'libnotify-devel',
+ 'alsa-lib-devel', 'libcurl-devel', 'wireless-tools-devel',
+ 'libX11-devel', 'libXt-devel', 'mesa-libGL-devel', 'gnome-vfs2-devel',
+ 'GConf2-devel',
+ ### from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'pulseaudio-libs-devel', 'gstreamer-devel',
+ 'gstreamer-plugins-base-devel', 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64'
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/debug-artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+releng.manifest",
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_source.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_source.py
new file mode 100644
index 000000000..dfc87cdf1
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_source.py
@@ -0,0 +1,20 @@
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'package-source',
+ 'generate-source-signing-manifest',
+ ],
+ 'stage_platform': 'source', # Not used, but required by the script
+ 'buildbot_json_path': 'buildprops.json',
+ 'app_ini_path': 'FAKE', # Not used, but required by the script
+ 'objdir': 'obj-firefox',
+ 'env': {
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'LC_ALL': 'C',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/source',
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
new file mode 100644
index 000000000..d4de036de
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
@@ -0,0 +1,50 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'debug_build': True,
+ 'stage_platform': 'linux64-st-an-debug',
+ 'build_type': 'st-an-debug',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+clang.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ 'enable_unittest_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ # 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/\
+debug-static-analysis-clang',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
new file mode 100644
index 000000000..496d89f96
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
@@ -0,0 +1,88 @@
+import os
+
+config = {
+ # note: overridden by MOZHARNESS_ACTIONS in TaskCluster tasks
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ # 'generate-build-stats',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': True,
+ 'enable_talos_sendchange': False,
+ 'enable_unittest_sendchange': False,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'Linux_x86-64_%(branch)s_Static_Analysis',
+ 'platform': 'linux64',
+ 'stage_platform': 'linux64-st-an',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ 'LD_LIBRARY_PATH': "/tools/gcc-4.3.3/installed/lib64",
+ ##
+ },
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind', 'dbus-x11',
+ ######## 64 bit specific ###########
+ 'glibc-static', 'libstdc++-static',
+ 'gtk2-devel', 'libnotify-devel',
+ 'alsa-lib-devel', 'libcurl-devel', 'wireless-tools-devel',
+ 'libX11-devel', 'libXt-devel', 'mesa-libGL-devel', 'gnome-vfs2-devel',
+ 'GConf2-devel',
+ ### from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'pulseaudio-libs-devel', 'gstreamer-devel',
+ 'gstreamer-plugins-base-devel', 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64'
+ ],
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/opt-static-analysis-clang',
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/linux64/\
+clang.manifest.centos6',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_tsan.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_tsan.py
new file mode 100644
index 000000000..ae8ed6278
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_tsan.py
@@ -0,0 +1,46 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ # 'check-test',
+ # 'generate-build-stats',
+ # 'update',
+ ],
+ 'stage_platform': 'linux64-tsan',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+tsan.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/opt-tsan',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_valgrind.py b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_valgrind.py
new file mode 100644
index 000000000..97ffd84f8
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_valgrind.py
@@ -0,0 +1,49 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ #'setup-mock',
+ 'build',
+ #'upload-files',
+ #'sendchange',
+ 'check-test',
+ 'valgrind-test',
+ #'generate-build-stats',
+ #'update',
+ ],
+ 'stage_platform': 'linux64-valgrind',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'valgrind',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\
+releng.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'DISPLAY': ':2',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
+/tools/python27-mercurial/bin:/home/cltbld/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/linux64/valgrind',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_add-on-devel.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_add-on-devel.py
new file mode 100644
index 000000000..d54c4d3a6
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_add-on-devel.py
@@ -0,0 +1,44 @@
+import os
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+# 'setup-mock',
+ 'build',
+ 'upload-files',
+# 'sendchange',
+ 'check-test',
+# 'generate-build-stats',
+# 'update',
+ ],
+ 'stage_platform': 'macosx64-add-on-devel',
+ 'publish_nightly_en_US_routes': False,
+ 'build_type': 'add-on-devel',
+ 'platform_supports_post_upload_to_latest': False,
+ 'objdir': 'obj-firefox',
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/add-on-devel',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_artifact.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_artifact.py
new file mode 100644
index 000000000..c4d74c145
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_artifact.py
@@ -0,0 +1,65 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## MACOSX GENERIC CONFIG KEYS/VALUES
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ # 'setup-mock',
+ 'checkout-sources',
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'OS X 10.7 %(branch)s_Artifact_build',
+ 'platform': 'macosx64',
+ 'stage_platform': 'macosx64',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'CHOWN_ROOT': '~/bin/chown_root',
+ 'CHOWN_REVERT': '~/bin/chown_revert',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/artifact',
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/macosx64/releng.manifest',
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_debug.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_debug.py
new file mode 100644
index 000000000..91cbdb62d
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_debug.py
@@ -0,0 +1,43 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'macosx64-debug',
+ 'debug_build': True,
+ 'objdir': 'obj-firefox',
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/debug',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_opt.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_opt.py
new file mode 100644
index 000000000..f29800f14
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_opt.py
@@ -0,0 +1,39 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock',
+ 'build',
+ ],
+ 'stage_platform': 'macosx64-st-an',
+ 'debug_build': False,
+ 'objdir': 'obj-firefox',
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/opt-static-analysis',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_universal.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_universal.py
new file mode 100644
index 000000000..c399b4f4d
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_cross_universal.py
@@ -0,0 +1,4 @@
+config = {
+ 'objdir': 'obj-firefox/x86_64',
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx-universal/nightly',
+}
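This four-line file is complete because mozharness assembles its configuration
from several --config files, with keys from later files overriding earlier
ones, so a sub-config only lists what it changes. A minimal sketch of the
merge, assuming simple last-wins dict semantics:

    def compose_config(*configs):
        # Merge left to right; keys from later configs win.
        merged = {}
        for cfg in configs:
            merged.update(cfg)
        return merged

    base = {'objdir': 'obj-firefox', 'platform': 'macosx64'}
    override = {'objdir': 'obj-firefox/x86_64'}
    assert compose_config(base, override)['objdir'] == 'obj-firefox/x86_64'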
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug.py
new file mode 100644
index 000000000..374dc12d1
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug.py
@@ -0,0 +1,44 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'macosx64-debug',
+ 'debug_build': True,
+ 'objdir': 'obj-firefox',
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/debug',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug_artifact.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug_artifact.py
new file mode 100644
index 000000000..937ca1291
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_debug_artifact.py
@@ -0,0 +1,65 @@
+import os
+import sys
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ #########################################################################
+ ######## MACOSX GENERIC CONFIG KEYS/VALUES
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ # 'setup-mock',
+ 'checkout-sources',
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": "/tools/buildbot/bin/buildbot",
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': True,
+ 'vcs_share_base': '/builds/hg-shared',
+ 'objdir': MOZ_OBJDIR,
+ # debug specific
+ 'debug_build': True,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'OS X 10.7 %(branch)s_Artifact_build',
+ 'platform': 'macosx64',
+ 'stage_platform': 'macosx64-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ # debug-specific
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ ## 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ ##
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/debug-artifact',
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_mac_configs/64_stat_and_debug.py b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_stat_and_debug.py
new file mode 100644
index 000000000..6dccae7ab
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_mac_configs/64_stat_and_debug.py
@@ -0,0 +1,48 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock',
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'debug_build': True,
+ 'stage_platform': 'macosx64-st-an-debug',
+ 'build_type': 'st-an-debug',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/macosx64/\
+clang.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ 'enable_unittest_sendchange': False,
+ 'objdir': MOZ_OBJDIR,
+ #### 64 bit build specific #####
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': '/builds/hg-shared',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'CCACHE_DIR': '/builds/ccache',
+ 'CCACHE_COMPRESS': '1',
+ 'CCACHE_UMASK': '002',
+ 'LC_ALL': 'C',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ # 64 bit specific
+ 'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
+ '/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/macosx64/debug-static-analysis',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/32_add-on-devel.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_add-on-devel.py
new file mode 100644
index 000000000..ba108ab1f
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_add-on-devel.py
@@ -0,0 +1,38 @@
+import os
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'upload-files',
+# 'sendchange',
+ 'check-test',
+# 'generate-build-stats',
+# 'update',
+ ],
+ 'stage_platform': 'win32-add-on-devel',
+ 'build_type': 'add-on-devel',
+ 'enable_talos_sendchange': False,
+ #### 32 bit build specific #####
+ 'env': {
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/add-on-devel',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/32_artifact.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_artifact.py
new file mode 100644
index 000000000..8bf35fba3
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_artifact.py
@@ -0,0 +1,81 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update in this
+ # code block and also make sure this is synced with
+ # releng_base_windows_32_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": [
+ sys.executable,
+ 'c:\\mozilla-build\\buildbotve\\scripts\\buildbot'
+ ],
+ "make": [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': False,
+ 'vcs_share_base': 'C:/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ 'max_build_output_timeout': 60 * 80,  # 80 minutes, in seconds
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'WINNT_5.2_%(branch)s_Artifact_build',
+ 'platform': 'win32',
+ 'stage_platform': 'win32',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.0b1;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/8.0/Debuggers/x64/srcsrv/pdbstr.exe',
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win32/releng.manifest",
+ #########################################################################
+}
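In the 'exes' map above a value may be a plain path or an argv-style list;
pymake and virtualenv are wrapped in sys.executable because .py files are not
directly executable on Windows. A minimal sketch of dispatching either form
(illustrative only; the real dispatch lives in the mozharness script harness):

    import subprocess

    def run_exe(exes, name, extra_args):
        exe = exes[name]
        argv = list(exe) if isinstance(exe, list) else [exe]  # normalize
        return subprocess.call(argv + list(extra_args))

    # e.g. run_exe(config['exes'], 'make', ['-j4'])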
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug.py
new file mode 100644
index 000000000..d9b769505
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug.py
@@ -0,0 +1,40 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'win32-debug',
+ 'debug_build': True,
+ 'enable_talos_sendchange': False,
+ #### 32 bit build specific #####
+ 'env': {
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/debug',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug_artifact.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug_artifact.py
new file mode 100644
index 000000000..ad9b2eeaf
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_debug_artifact.py
@@ -0,0 +1,86 @@
+import os
+import sys
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update in this
+ # code block and also make sure this is synced with
+ # releng_base_windows_32_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": [
+ sys.executable,
+ 'c:\\mozilla-build\\buildbotve\\scripts\\buildbot'
+ ],
+ "make": [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': False,
+ 'vcs_share_base': 'C:/builds/hg-shared',
+ 'objdir': MOZ_OBJDIR,
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ # debug specific
+ 'debug_build': True,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ 'max_build_output_timeout': 60 * 80,  # 80 minutes, in seconds
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'WINNT_5.2_%(branch)s_Artifact_build',
+ 'platform': 'win32',
+ 'stage_platform': 'win32-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'PATH': 'C:/mozilla-build/nsis-3.0b1;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ # debug-specific
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/debug-artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win32/releng.manifest",
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/32_stat_and_debug.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_stat_and_debug.py
new file mode 100644
index 000000000..e02703462
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/32_stat_and_debug.py
@@ -0,0 +1,44 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ # 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'win32-st-an-debug',
+ 'debug_build': True,
+ 'enable_signing': False,
+ 'enable_talos_sendchange': False,
+ 'enable_unittest_sendchange': False,
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win32/\
+clang.manifest",
+ 'platform_supports_post_upload_to_latest': False,
+ 'objdir': MOZ_OBJDIR,
+ #### 32 bit build specific #####
+ 'env': {
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/win32/debug-static-analysis',
+ 'purge_minsize': 9,
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/64_add-on-devel.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_add-on-devel.py
new file mode 100644
index 000000000..8567c7e72
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_add-on-devel.py
@@ -0,0 +1,37 @@
+import os
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'upload-files',
+# 'sendchange',
+ 'check-test',
+# 'generate-build-stats',
+# 'update',
+ ],
+ 'stage_platform': 'win64-add-on-devel',
+ 'build_type': 'add-on-devel',
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/add-on-devel',
+ #######################
+}
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/64_artifact.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_artifact.py
new file mode 100644
index 000000000..b99ebb6b3
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_artifact.py
@@ -0,0 +1,79 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 64 bit keys/values, please add them
+ # below under the '64 bit specific' code block; otherwise, update in this
+ # code block and also make sure this is synced with
+ # releng_base_windows_64_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": [
+ sys.executable,
+ 'c:\\mozilla-build\\buildbotve\\scripts\\buildbot'
+ ],
+ "make": [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': False,
+ 'vcs_share_base': 'C:/builds/hg-shared',
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ 'max_build_output_timeout': 60 * 80,  # 80 minutes, in seconds
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'WINNT_6.1_x86-64_%(branch)s_Artifact_build',
+ 'platform': 'win64',
+ 'stage_platform': 'win64',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.0b1;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/8.0/Debuggers/x64/srcsrv/pdbstr.exe',
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win64/releng.manifest",
+ #########################################################################
+}
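Several values here, such as 'base_name' and 'app_ini_path', hold deferred
%(...)s placeholders that are only interpolated at runtime once the branch and
objdir are known; %-formatting with a mapping ignores keys the template does
not use. Illustrative only:

    base_name = 'WINNT_6.1_x86-64_%(branch)s_Artifact_build'
    app_ini_path = '%(obj_dir)s/dist/bin/application.ini'
    runtime = {'branch': 'mozilla-central', 'obj_dir': 'obj-firefox'}
    assert base_name % runtime == 'WINNT_6.1_x86-64_mozilla-central_Artifact_build'
    assert app_ini_path % runtime == 'obj-firefox/dist/bin/application.ini'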
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug.py
new file mode 100644
index 000000000..e8145dea9
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug.py
@@ -0,0 +1,39 @@
+import os
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'upload-files',
+ 'sendchange',
+ 'check-test',
+ 'generate-build-stats',
+ 'update', # decided by query_is_nightly()
+ ],
+ 'stage_platform': 'win64-debug',
+ 'debug_build': True,
+ 'enable_talos_sendchange': False,
+ #### 64 bit build specific #####
+ 'env': {
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PATH': 'C:/mozilla-build/nsis-3.01;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/debug',
+ #######################
+}
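
One subtlety in the env block above: '%s' % (os.environ.get('path')) interpolates the literal string 'None' into PATH if the variable is somehow unset (lookups in os.environ are case-insensitive on Windows, so 'path' normally resolves). A more defensive spelling, offered as a sketch rather than a change to the config itself:

    import os

    # Fall back to an empty string instead of interpolating 'None'.
    inherited = os.environ.get('PATH', '')
    path = ';'.join([
        'C:/mozilla-build/nsis-3.01',
        'C:/mozilla-build/python27',
        'C:/mozilla-build/buildbotve/scripts',
        inherited,
    ])
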
diff --git a/testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug_artifact.py b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug_artifact.py
new file mode 100644
index 000000000..892a6622d
--- /dev/null
+++ b/testing/mozharness/configs/builds/releng_sub_windows_configs/64_debug_artifact.py
@@ -0,0 +1,85 @@
+import os
+import sys
+
+MOZ_OBJDIR = 'obj-firefox'
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 64 bit keys/values, please add them
+ # below under the '64 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced with
+ # releng_base_windows_64_builds.py
+
+ 'default_actions': [
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ # 'setup-mock', Windows does not use mock
+ 'build',
+ 'sendchange',
+ ],
+ "buildbot_json_path": "buildprops.json",
+ 'exes': {
+ 'python2.7': sys.executable,
+ "buildbot": [
+ sys.executable,
+ 'c:\\mozilla-build\\buildbotve\\scripts\\buildbot'
+ ],
+ "make": [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': False,
+ 'enable_ccache': False,
+ 'vcs_share_base': 'C:/builds/hg-shared',
+ 'objdir': MOZ_OBJDIR,
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'enable_count_ctors': False,
+ # debug specific
+ 'debug_build': True,
+ 'enable_talos_sendchange': False,
+ # allows triggering of test jobs when --artifact try syntax is detected on buildbot
+ 'enable_unittest_sendchange': True,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'WINNT_6.1_x86-64_%(branch)s_Artifact_build',
+ 'platform': 'win64',
+ 'stage_platform': 'win64-debug',
+ 'publish_nightly_en_US_routes': False,
+ 'env': {
+ 'BINSCOPE': 'C:/Program Files (x86)/Microsoft/SDL BinScope/BinScope.exe',
+ 'MOZ_AUTOMATION': '1',
+ 'HG_SHARE_BASE_DIR': 'C:/builds/hg-shared',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': MOZ_OBJDIR,
+ 'PATH': 'C:/mozilla-build/nsis-3.0b1;C:/mozilla-build/python27;'
+ 'C:/mozilla-build/buildbotve/scripts;'
+ '%s' % (os.environ.get('path')),
+ 'PROPERTIES_FILE': os.path.join(os.getcwd(), 'buildprops.json'),
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ # debug-specific
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser/config/mozconfigs/win64/debug-artifact',
+ 'tooltool_manifest_src': "browser/config/tooltool-manifests/win64/releng.manifest",
+ #########################################################################
+}
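
Note the design difference from the opt artifact config earlier in this patch: here both 'objdir' and env['MOZ_OBJDIR'] are routed through the module-level MOZ_OBJDIR constant, so the two values cannot drift apart. Reduced to its essence:

    # One source of truth for the object directory.
    MOZ_OBJDIR = 'obj-firefox'

    config = {
        'objdir': MOZ_OBJDIR,
        'env': {'MOZ_OBJDIR': MOZ_OBJDIR},
    }
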
diff --git a/testing/mozharness/configs/builds/taskcluster_firefox_win32_debug.py b/testing/mozharness/configs/builds/taskcluster_firefox_win32_debug.py
new file mode 100644
index 000000000..ed53474ad
--- /dev/null
+++ b/testing/mozharness/configs/builds/taskcluster_firefox_win32_debug.py
@@ -0,0 +1,91 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced between:
+ # - taskcluster_firefox_win32_debug
+ # - taskcluster_firefox_win32_opt
+ # - taskcluster_firefox_win64_debug
+ # - taskcluster_firefox_win64_opt
+
+ 'default_actions': [
+ 'clone-tools',
+ 'build',
+ 'check-test',
+ ],
+ 'exes': {
+ 'python2.7': sys.executable,
+ 'make': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'python', 'virtualenv', 'virtualenv.py'
+ )
+ ],
+ 'mach-build': [
+ os.path.join(os.environ['MOZILLABUILD'], 'msys', 'bin', 'bash.exe'),
+ os.path.join(os.getcwd(), 'build', 'src', 'mach'),
+ '--log-no-times', 'build', '-v'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': False,
+ 'vcs_share_base': os.path.join('y:', os.sep, 'hg-shared'),
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [
+ sys.executable,
+ os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')
+ ],
+ 'tooltool_bootstrap': 'setup.sh',
+ 'enable_count_ctors': False,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'WINNT_5.2_%(branch)s',
+ 'platform': 'win32',
+ 'stage_platform': 'win32-debug',
+ 'debug_build': True,
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'BINSCOPE': os.path.join(
+ os.environ['ProgramFiles(x86)'], 'Microsoft', 'SDL BinScope', 'BinScope.exe'
+ ),
+ 'HG_SHARE_BASE_DIR': os.path.join('y:', os.sep, 'hg-shared'),
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/10/Debuggers/x86/srcsrv/pdbstr.exe',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'MSYSTEM': 'MINGW32',
+ },
+ 'upload_env': {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': os.path.join(os.getcwd(), 'public', 'build'),
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s\\breakpad\\win32\\minidump_stackwalk.exe',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s\\minidumps',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser\\config\\mozconfigs\\win32\\debug',
+ 'tooltool_manifest_src': 'browser\\config\\tooltool-manifests\\win32\\releng.manifest',
+ #########################################################################
+}
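
The header comment asks for the generic block to stay synced across the four taskcluster Windows configs. A hedged sketch of a consistency check (the key list is illustrative, and the environment defaults exist only so the files evaluate cleanly off a build machine):

    import os

    # These configs read env vars at import time; provide stand-ins.
    os.environ.setdefault('MOZILLABUILD', 'C:/mozilla-build')
    os.environ.setdefault('ProgramFiles(x86)', 'C:/Program Files (x86)')

    GENERIC_KEYS = ['default_actions', 'app_ini_path', 'enable_signing',
                    'enable_ccache', 'objdir', 'tooltool_bootstrap',
                    'enable_count_ctors', 'max_build_output_timeout']

    def load_config(path):
        """Evaluate a mozharness config file and return its config dict."""
        scope = {}
        with open(path) as f:
            exec(f.read(), scope)  # config files are plain Python modules
        return scope['config']

    def check_synced(paths):
        configs = {p: load_config(p) for p in paths}
        for key in GENERIC_KEYS:
            values = {p: repr(cfg.get(key)) for p, cfg in configs.items()}
            if len(set(values.values())) > 1:
                print('out of sync: %s -> %s' % (key, values))
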
diff --git a/testing/mozharness/configs/builds/taskcluster_firefox_win32_opt.py b/testing/mozharness/configs/builds/taskcluster_firefox_win32_opt.py
new file mode 100644
index 000000000..4a6502dce
--- /dev/null
+++ b/testing/mozharness/configs/builds/taskcluster_firefox_win32_opt.py
@@ -0,0 +1,89 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 32 bit keys/values, please add them
+ # below under the '32 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced between:
+ # - taskcluster_firefox_win32_debug
+ # - taskcluster_firefox_win32_opt
+ # - taskcluster_firefox_win64_debug
+ # - taskcluster_firefox_win64_opt
+
+ 'default_actions': [
+ 'clone-tools',
+ 'build',
+ 'check-test',
+ ],
+ 'exes': {
+ 'python2.7': sys.executable,
+ 'make': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'python', 'virtualenv', 'virtualenv.py'
+ )
+ ],
+ 'mach-build': [
+ os.path.join(os.environ['MOZILLABUILD'], 'msys', 'bin', 'bash.exe'),
+ os.path.join(os.getcwd(), 'build', 'src', 'mach'),
+ '--log-no-times', 'build', '-v'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': False,
+ 'vcs_share_base': os.path.join('y:', os.sep, 'hg-shared'),
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [
+ sys.executable,
+ os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')
+ ],
+ 'tooltool_bootstrap': 'setup.sh',
+ 'enable_count_ctors': False,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 32 bit specific ######
+ 'base_name': 'WINNT_5.2_%(branch)s',
+ 'platform': 'win32',
+ 'stage_platform': 'win32',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'BINSCOPE': os.path.join(
+ os.environ['ProgramFiles(x86)'], 'Microsoft', 'SDL BinScope', 'BinScope.exe'
+ ),
+ 'HG_SHARE_BASE_DIR': os.path.join('y:', os.sep, 'hg-shared'),
+ 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/10/Debuggers/x86/srcsrv/pdbstr.exe',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ 'MSYSTEM': 'MINGW32',
+ },
+ 'upload_env': {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': os.path.join(os.getcwd(), 'public', 'build'),
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s\\breakpad\\win32\\minidump_stackwalk.exe',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s\\minidumps',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser\\config\\mozconfigs\\win32\\nightly',
+ 'tooltool_manifest_src': 'browser\\config\\tooltool-manifests\\win32\\releng.manifest',
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/taskcluster_firefox_win64_debug.py b/testing/mozharness/configs/builds/taskcluster_firefox_win64_debug.py
new file mode 100644
index 000000000..687cf13c6
--- /dev/null
+++ b/testing/mozharness/configs/builds/taskcluster_firefox_win64_debug.py
@@ -0,0 +1,87 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 64 bit keys/values, please add them
+ # below under the '64 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced between:
+ # - taskcluster_firefox_win32_debug
+ # - taskcluster_firefox_win32_opt
+ # - taskcluster_firefox_win64_debug
+ # - taskcluster_firefox_win64_opt
+
+ 'default_actions': [
+ 'clone-tools',
+ 'build',
+ 'check-test',
+ ],
+ 'exes': {
+ 'python2.7': sys.executable,
+ 'make': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'python', 'virtualenv', 'virtualenv.py'
+ )
+ ],
+ 'mach-build': [
+ os.path.join(os.environ['MOZILLABUILD'], 'msys', 'bin', 'bash.exe'),
+ os.path.join(os.getcwd(), 'build', 'src', 'mach'),
+ '--log-no-times', 'build', '-v'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': False,
+ 'vcs_share_base': os.path.join('y:', os.sep, 'hg-shared'),
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [
+ sys.executable,
+ os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')
+ ],
+ 'tooltool_bootstrap': 'setup.sh',
+ 'enable_count_ctors': False,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'WINNT_6.1_x86-64_%(branch)s',
+ 'platform': 'win64',
+ 'stage_platform': 'win64-debug',
+ 'debug_build': True,
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'HG_SHARE_BASE_DIR': os.path.join('y:', os.sep, 'hg-shared'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/10/Debuggers/x64/srcsrv/pdbstr.exe',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ 'XPCOM_DEBUG_BREAK': 'stack-and-abort',
+ 'MSYSTEM': 'MINGW32',
+ },
+ 'upload_env': {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': os.path.join(os.getcwd(), 'public', 'build'),
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s\\breakpad\\win64\\minidump_stackwalk.exe',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s\\minidumps',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser\\config\\mozconfigs\\win64\\debug',
+ 'tooltool_manifest_src': 'browser\\config\\tooltool-manifests\\win64\\releng.manifest',
+ #########################################################################
+}
diff --git a/testing/mozharness/configs/builds/taskcluster_firefox_win64_opt.py b/testing/mozharness/configs/builds/taskcluster_firefox_win64_opt.py
new file mode 100644
index 000000000..ba9cc9350
--- /dev/null
+++ b/testing/mozharness/configs/builds/taskcluster_firefox_win64_opt.py
@@ -0,0 +1,85 @@
+import os
+import sys
+
+config = {
+ #########################################################################
+ ######## WINDOWS GENERIC CONFIG KEYS/VALUES
+ # If you are updating this with custom 64 bit keys/values, please add them
+ # below under the '64 bit specific' code block; otherwise, update this
+ # code block and also make sure it stays synced between:
+ # - taskcluster_firefox_win32_debug
+ # - taskcluster_firefox_win32_opt
+ # - taskcluster_firefox_win64_debug
+ # - taskcluster_firefox_win64_opt
+
+ 'default_actions': [
+ 'clone-tools',
+ 'build',
+ 'check-test',
+ ],
+ 'exes': {
+ 'python2.7': sys.executable,
+ 'make': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'build', 'pymake', 'make.py'
+ )
+ ],
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(
+ os.getcwd(), 'build', 'src', 'python', 'virtualenv', 'virtualenv.py'
+ )
+ ],
+ 'mach-build': [
+ os.path.join(os.environ['MOZILLABUILD'], 'msys', 'bin', 'bash.exe'),
+ os.path.join(os.getcwd(), 'build', 'src', 'mach'),
+ '--log-no-times', 'build', '-v'
+ ],
+ },
+ 'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
+ # decides whether we want to use moz_sign_cmd in env
+ 'enable_signing': True,
+ 'enable_ccache': False,
+ 'vcs_share_base': os.path.join('y:', os.sep, 'hg-shared'),
+ 'objdir': 'obj-firefox',
+ 'tooltool_script': [
+ sys.executable,
+ os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')
+ ],
+ 'tooltool_bootstrap': 'setup.sh',
+ 'enable_count_ctors': False,
+ 'max_build_output_timeout': 60 * 80,
+ #########################################################################
+
+
+ #########################################################################
+ ###### 64 bit specific ######
+ 'base_name': 'WINNT_6.1_x86-64_%(branch)s',
+ 'platform': 'win64',
+ 'stage_platform': 'win64',
+ 'publish_nightly_en_US_routes': True,
+ 'env': {
+ 'HG_SHARE_BASE_DIR': os.path.join('y:', os.sep, 'hg-shared'),
+ 'MOZ_AUTOMATION': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_OBJDIR': 'obj-firefox',
+ 'PDBSTR_PATH': '/c/Program Files (x86)/Windows Kits/10/Debuggers/x64/srcsrv/pdbstr.exe',
+ 'TINDERBOX_OUTPUT': '1',
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ 'MSYSTEM': 'MINGW32',
+ },
+ 'upload_env': {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': os.path.join(os.getcwd(), 'public', 'build'),
+ },
+ "check_test_env": {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s\\breakpad\\win64\\minidump_stackwalk.exe',
+ 'MINIDUMP_SAVE_PATH': '%(base_work_dir)s\\minidumps',
+ },
+ 'enable_pymake': True,
+ 'src_mozconfig': 'browser\\config\\mozconfigs\\win64\\nightly',
+ 'tooltool_manifest_src': 'browser\\config\\tooltool-manifests\\win64\\releng.manifest',
+ #########################################################################
+}
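
Throughout these files, an 'exes' entry can be either a plain path or an argv prefix list (as with 'mach-build' above, which fronts mach with MozillaBuild's bash). A minimal sketch of how a harness would consume either form, under that assumption:

    import subprocess

    def run_exe(exes, name, extra_args=()):
        """Run a configured executable; accepts string or argv-list entries."""
        exe = exes[name]
        cmd = (list(exe) if isinstance(exe, list) else [exe]) + list(extra_args)
        return subprocess.call(cmd)

    # run_exe(config['exes'], 'mach-build') would spawn bash.exe with the
    # mach build arguments baked into the list above.
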
diff --git a/testing/mozharness/configs/developer_config.py b/testing/mozharness/configs/developer_config.py
new file mode 100644
index 000000000..49ddb6eb7
--- /dev/null
+++ b/testing/mozharness/configs/developer_config.py
@@ -0,0 +1,49 @@
+"""
+This config file can be appended to any other mozharness job
+running under treeherder. Its purpose is to override values that
+are specific to Release Engineering machines, which can reach
+hosts that are only available inside their network. In other words,
+this config allows you to run any job outside of the Release
+Engineering network.
+
+Using this config file should be accompanied by passing
+--test-url and --installer-url where appropriate.
+"""
+
+import os
+LOCAL_WORKDIR = os.path.expanduser("~/.mozilla/releng")
+
+config = {
+ # Developer mode values
+ "developer_mode": True,
+ "local_workdir": LOCAL_WORKDIR,
+ "replace_urls": [
+ ("http://pvtbuilds.pvt.build", "https://pvtbuilds"),
+ ],
+
+ # General local variable overwrite
+ "exes": {
+ "gittool.py": os.path.join(LOCAL_WORKDIR, "gittool.py"),
+ },
+
+ # Pip
+ "find_links": ["http://pypi.pub.build.mozilla.org/pub"],
+ "pip_index": False,
+
+ # Talos related
+ "python_webserver": True,
+ "virtualenv_path": '%s/build/venv' % os.getcwd(),
+ "preflight_run_cmd_suites": [],
+ "postflight_run_cmd_suites": [],
+
+ # Tooltool related
+ "download_tooltool": True,
+ "tooltool_cache": os.path.join(LOCAL_WORKDIR, "builds/tooltool_cache"),
+ "tooltool_cache_path": os.path.join(LOCAL_WORKDIR, "builds/tooltool_cache"),
+
+ # VCS tools
+ "gittool.py": 'http://hg.mozilla.org/build/puppet/raw-file/faaf5abd792e/modules/packages/files/gittool.py',
+
+ # Android related
+ "host_utils_url": "https://api.pub.build.mozilla.org/tooltool/sha512/372c89f9dccaf5ee3b9d35fd1cfeb089e1e5db3ff1c04e35aa3adc8800bc61a2ae10e321f37ae7bab20b56e60941f91bb003bcb22035902a73d70872e7bd3282",
+}
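
The replace_urls pairs above rewrite RelEng-private hosts into externally reachable ones. A sketch of how such pairs might be applied, assuming a simple prefix substitution (the helper name is hypothetical):

    def apply_replace_urls(url, replace_urls):
        """Rewrite internal URLs using (old, new) prefix pairs."""
        for old, new in replace_urls:
            if url.startswith(old):
                return new + url[len(old):]
        return url

    # apply_replace_urls('http://pvtbuilds.pvt.build/x.dmg',
    #                    [('http://pvtbuilds.pvt.build', 'https://pvtbuilds')])
    # -> 'https://pvtbuilds/x.dmg'
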
diff --git a/testing/mozharness/configs/disable_signing.py b/testing/mozharness/configs/disable_signing.py
new file mode 100644
index 000000000..77fc85f2d
--- /dev/null
+++ b/testing/mozharness/configs/disable_signing.py
@@ -0,0 +1,3 @@
+config = {
+ 'enable_signing': False,
+}
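
Three-line overlays like this one work because mozharness merges every --config-file (-c) argument left to right into a single dict, so a later file's 'enable_signing': False overrides an earlier True. Reduced to a sketch, assuming shallow-update semantics:

    def merge_configs(*configs):
        """Left-to-right shallow merge; later -c files win."""
        merged = {}
        for cfg in configs:
            merged.update(cfg)
        return merged

    base = {'enable_signing': True, 'objdir': 'obj-firefox'}
    overlay = {'enable_signing': False}
    assert merge_configs(base, overlay)['enable_signing'] is False
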
diff --git a/testing/mozharness/configs/firefox_ui_tests/qa_jenkins.py b/testing/mozharness/configs/firefox_ui_tests/qa_jenkins.py
new file mode 100644
index 000000000..5f6911b81
--- /dev/null
+++ b/testing/mozharness/configs/firefox_ui_tests/qa_jenkins.py
@@ -0,0 +1,19 @@
+# Default configuration as used by Mozmill CI (Jenkins)
+
+
+config = {
+ # Tests run in mozmill-ci do not use RelEng infra
+ 'developer_mode': True,
+
+ # PIP
+ 'find_links': ['http://pypi.pub.build.mozilla.org/pub'],
+ 'pip_index': False,
+
+ # mozcrash support
+ 'download_minidump_stackwalk': True,
+ 'download_symbols': 'ondemand',
+ 'download_tooltool': True,
+
+ # Disable proxxy because it isn't present in the QA environment.
+ 'proxxy': {},
+}
diff --git a/testing/mozharness/configs/firefox_ui_tests/releng_release.py b/testing/mozharness/configs/firefox_ui_tests/releng_release.py
new file mode 100644
index 000000000..28baf6aef
--- /dev/null
+++ b/testing/mozharness/configs/firefox_ui_tests/releng_release.py
@@ -0,0 +1,33 @@
+# Default configuration as used by Release Engineering for testing release/beta builds
+
+import os
+import sys
+
+import mozharness
+
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+
+config = {
+ # General local variable overwrite
+ 'exes': {
+ 'gittool.py': [
+ # Bug 1227079 - Python executable needed to get it executed on Windows
+ sys.executable,
+ os.path.join(external_tools_path, 'gittool.py')
+ ],
+ },
+
+ # PIP
+ 'find_links': ['http://pypi.pub.build.mozilla.org/pub'],
+ 'pip_index': False,
+
+ # mozcrash support
+ 'download_minidump_stackwalk': True,
+ 'download_symbols': 'ondemand',
+ 'download_tooltool': True,
+}
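
The external_tools_path computation above (repeated in several configs that follow) climbs two directory levels up from the imported mozharness package to reach the checkout root, whose external_tools directory holds helpers like gittool.py. Traced with an illustrative path:

    import os

    # If mozharness.__file__ is .../testing/mozharness/mozharness/__init__.py:
    pkg_init = '/src/testing/mozharness/mozharness/__init__.py'  # illustrative
    pkg_dir = os.path.dirname(pkg_init)            # .../mozharness/mozharness
    root = os.path.dirname(pkg_dir)                # .../testing/mozharness
    external_tools = os.path.join(os.path.abspath(root), 'external_tools')
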
diff --git a/testing/mozharness/configs/firefox_ui_tests/taskcluster.py b/testing/mozharness/configs/firefox_ui_tests/taskcluster.py
new file mode 100644
index 000000000..66fc72935
--- /dev/null
+++ b/testing/mozharness/configs/firefox_ui_tests/taskcluster.py
@@ -0,0 +1,11 @@
+# Config file for firefox ui tests run via TaskCluster.
+
+config = {
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+
+ "pip_index": False,
+
+ "tooltool_cache": "/builds/tooltool_cache",
+}
diff --git a/testing/mozharness/configs/hazards/build_browser.py b/testing/mozharness/configs/hazards/build_browser.py
new file mode 100644
index 000000000..a08efe925
--- /dev/null
+++ b/testing/mozharness/configs/hazards/build_browser.py
@@ -0,0 +1,4 @@
+config = {
+ 'build_command': "build.browser",
+ 'expect_file': "expect.browser.json",
+}
diff --git a/testing/mozharness/configs/hazards/build_shell.py b/testing/mozharness/configs/hazards/build_shell.py
new file mode 100644
index 000000000..16135705a
--- /dev/null
+++ b/testing/mozharness/configs/hazards/build_shell.py
@@ -0,0 +1,4 @@
+config = {
+ 'build_command': "build.shell",
+ 'expect_file': "expect.shell.json",
+}
diff --git a/testing/mozharness/configs/hazards/common.py b/testing/mozharness/configs/hazards/common.py
new file mode 100644
index 000000000..f8d751044
--- /dev/null
+++ b/testing/mozharness/configs/hazards/common.py
@@ -0,0 +1,104 @@
+import os
+
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+PYTHON_DIR = "/tools/python27"
+SRCDIR = "source"
+
+config = {
+ "platform": "linux64",
+ "build_type": "br-haz",
+ "log_name": "hazards",
+ "shell-objdir": "obj-opt-js",
+ "analysis-dir": "analysis",
+ "analysis-objdir": "obj-analyzed",
+ "srcdir": SRCDIR,
+ "analysis-scriptdir": "js/src/devtools/rootAnalysis",
+
+ # These paths are relative to the tooltool checkout location
+ "sixgill": "sixgill/usr/libexec/sixgill",
+ "sixgill_bin": "sixgill/usr/bin",
+
+ "python": "python",
+
+ "exes": {
+ 'gittool.py': '%(abs_tools_dir)s/buildfarm/utils/gittool.py',
+ 'tooltool.py': '/tools/tooltool.py',
+ "virtualenv": [PYTHON_DIR + "/bin/python", "/tools/misc-python/virtualenv.py"],
+ },
+
+ "force_clobber": True,
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+
+ "repos": [{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+
+ "upload_remote_baseuri": 'https://ftp-ssl.mozilla.org/',
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+
+ "virtualenv_path": '%s/venv' % os.getcwd(),
+ 'tools_dir': "/tools",
+ 'compiler_manifest': "build/gcc.manifest",
+ 'b2g_compiler_manifest': "build/gcc-b2g.manifest",
+ 'sixgill_manifest': "build/sixgill.manifest",
+
+ # Mock.
+ "mock_packages": [
+ "autoconf213", "mozilla-python27-mercurial", "ccache",
+ "zip", "zlib-devel", "glibc-static",
+ "openssh-clients", "mpfr", "wget", "rsync",
+
+ # For building the JS shell
+ "gmp-devel", "nspr", "nspr-devel",
+
+ # For building the browser
+ "dbus-devel", "dbus-glib-devel", "hal-devel",
+ "libICE-devel", "libIDL-devel",
+
+ # For mach resource-usage
+ "python-psutil",
+
+ 'zip', 'git',
+ 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'pulseaudio-libs-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ("/tools/tooltool.py", "/tools/tooltool.py"),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+ "env_replacements": {
+ "pythondir": PYTHON_DIR,
+ "gccdir": "%(abs_work_dir)s/gcc",
+ "sixgilldir": "%(abs_work_dir)s/sixgill",
+ },
+ "partial_env": {
+ "PATH": "%(pythondir)s/bin:%(gccdir)s/bin:%(PATH)s",
+ "LD_LIBRARY_PATH": "%(sixgilldir)s/usr/lib64",
+
+ # Suppress the mercurial-setup check. When running in automation, this
+ # is redundant with MOZ_AUTOMATION, but a local developer-mode build
+ # will have the mach state directory set to a nonstandard location and
+ # therefore will always claim that mercurial-setup has not been run.
+ "I_PREFER_A_SUBOPTIMAL_MERCURIAL_EXPERIENCE": "1",
+ },
+}
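
partial_env values carry %(name)s placeholders that are resolved from env_replacements (and, for PATH, the inherited value) before the result is overlaid on the process environment. A hedged sketch of that expansion; it assumes env_replacements values have already had their own %(abs_work_dir)s fields filled in by the harness:

    import os

    def build_env(partial_env, env_replacements):
        """Expand %(key)s placeholders, then overlay onto os.environ."""
        context = dict(env_replacements)
        context.setdefault('PATH', os.environ.get('PATH', ''))
        env = dict(os.environ)
        for key, value in partial_env.items():
            env[key] = value % context
        return env
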
diff --git a/testing/mozharness/configs/marionette/prod_config.py b/testing/mozharness/configs/marionette/prod_config.py
new file mode 100644
index 000000000..0d71c1cc3
--- /dev/null
+++ b/testing/mozharness/configs/marionette/prod_config.py
@@ -0,0 +1,56 @@
+# This is a template config file for marionette production.
+import os
+
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ # marionette options
+ "marionette_address": "localhost:2828",
+ "test_manifest": "unit-tests.ini",
+
+ "vcs_share_base": HG_SHARE_BASE_DIR,
+ "exes": {
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ "buildbot_json_path": "buildprops.json",
+
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+ "download_symbols": "ondemand",
+ "download_minidump_stackwalk": True,
+ "tooltool_cache": "/builds/tooltool_cache",
+ "suite_definitions": {
+ "marionette_desktop": {
+ "options": [
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--log-html=%(html_report_file)s",
+ "--binary=%(binary)s",
+ "--address=%(address)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "",
+ "testsdir": ""
+ }
+ },
+ "structured_output": True,
+}
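
The suite option strings are templates: the harness fills the %(...)s fields with run-specific paths before assembling the marionette command line. A small sketch with illustrative values (the runner name is a stand-in):

    options = [
        '--log-raw=%(raw_log_file)s',
        '--binary=%(binary)s',
        '--address=%(address)s',
    ]
    fields = {
        'raw_log_file': 'build/raw.log',
        'binary': 'build/firefox/firefox',
        'address': 'localhost:2828',
    }
    cmd = ['runtests.py'] + [opt % fields for opt in options]
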
diff --git a/testing/mozharness/configs/marionette/test_config.py b/testing/mozharness/configs/marionette/test_config.py
new file mode 100644
index 000000000..6a0f3eee3
--- /dev/null
+++ b/testing/mozharness/configs/marionette/test_config.py
@@ -0,0 +1,29 @@
+# This is a template config file for marionette test.
+
+config = {
+ # marionette options
+ "marionette_address": "localhost:2828",
+ "test_manifest": "unit-tests.ini",
+
+ "default_actions": [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ ],
+ "suite_definitions": {
+ "marionette_desktop": {
+ "options": [
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--log-html=%(html_report_file)s",
+ "--binary=%(binary)s",
+ "--address=%(address)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "",
+ "testsdir": ""
+ },
+ },
+}
diff --git a/testing/mozharness/configs/marionette/windows_config.py b/testing/mozharness/configs/marionette/windows_config.py
new file mode 100644
index 000000000..039a459b2
--- /dev/null
+++ b/testing/mozharness/configs/marionette/windows_config.py
@@ -0,0 +1,57 @@
+# This is a template config file for marionette production on Windows.
+import os
+import sys
+
+config = {
+ # marionette options
+ "marionette_address": "localhost:2828",
+ "test_manifest": "unit-tests.ini",
+
+ "virtualenv_python_dll": 'c:/mozilla-build/python27/python27.dll',
+ "virtualenv_path": 'venv',
+ "exes": {
+ 'python': 'c:/mozilla-build/python27/python',
+ 'virtualenv': ['c:/mozilla-build/python27/python', 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ 'hg': 'c:/mozilla-build/hg/hg',
+ 'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
+ '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
+ 'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
+ },
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ "buildbot_json_path": "buildprops.json",
+
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "download_symbols": "ondemand",
+ "suite_definitions": {
+ "marionette_desktop": {
+ "options": [
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--log-html=%(html_report_file)s",
+ "--binary=%(binary)s",
+ "--address=%(address)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "",
+ "testsdir": ""
+ },
+ },
+}
diff --git a/testing/mozharness/configs/marionette/windows_taskcluster_config.py b/testing/mozharness/configs/marionette/windows_taskcluster_config.py
new file mode 100644
index 000000000..fe3ed0c62
--- /dev/null
+++ b/testing/mozharness/configs/marionette/windows_taskcluster_config.py
@@ -0,0 +1,56 @@
+# This is a template config file for marionette production on Windows.
+import os
+import sys
+
+config = {
+ # marionette options
+ "marionette_address": "localhost:2828",
+ "test_manifest": "unit-tests.ini",
+
+ "virtualenv_python_dll": os.path.join(os.path.dirname(sys.executable), 'python27.dll'),
+ "virtualenv_path": 'venv',
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(os.path.dirname(sys.executable), 'Lib', 'site-packages', 'virtualenv.py')
+ ],
+ 'mozinstall': ['build/venv/scripts/python', 'build/venv/scripts/mozinstall-script.py'],
+ 'tooltool.py': [sys.executable, os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')],
+ 'hg': os.path.join(os.environ['PROGRAMFILES'], 'Mercurial', 'hg')
+ },
+
+ "proxxy": {},
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ "default_actions": [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : 'C:/builds/oauth.txt',
+ "download_minidump_stackwalk": True,
+ "download_symbols": "ondemand",
+ "suite_definitions": {
+ "marionette_desktop": {
+ "options": [
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--log-html=%(html_report_file)s",
+ "--binary=%(binary)s",
+ "--address=%(address)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "",
+ "testsdir": ""
+ },
+ },
+}
diff --git a/testing/mozharness/configs/mediatests/buildbot_posix_config.py b/testing/mozharness/configs/mediatests/buildbot_posix_config.py
new file mode 100644
index 000000000..8c30a9f28
--- /dev/null
+++ b/testing/mozharness/configs/mediatests/buildbot_posix_config.py
@@ -0,0 +1,50 @@
+import os
+import mozharness
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+config = {
+ "virtualenv_path": 'venv',
+ "exes": {
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ "buildbot_json_path": "buildprops.json",
+
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "download_symbols": "ondemand",
+
+ "suite_definitions": {
+ "media-tests": {
+ "options": [],
+ },
+ "media-youtube-tests": {
+ "options": [
+ "%(test_manifest)s"
+ ],
+ },
+ },
+}
diff --git a/testing/mozharness/configs/mediatests/buildbot_windows_config.py b/testing/mozharness/configs/mediatests/buildbot_windows_config.py
new file mode 100644
index 000000000..270938378
--- /dev/null
+++ b/testing/mozharness/configs/mediatests/buildbot_windows_config.py
@@ -0,0 +1,56 @@
+import os
+import sys
+import mozharness
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+config = {
+ "virtualenv_python_dll": 'c:/mozilla-build/python27/python27.dll',
+ "virtualenv_path": 'venv',
+ "exes": {
+ 'python': 'c:/mozilla-build/python27/python',
+ 'virtualenv': ['c:/mozilla-build/python27/python', 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ 'hg': 'c:/mozilla-build/hg/hg',
+ 'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
+ '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
+ 'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
+ },
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ "buildbot_json_path": "buildprops.json",
+
+ "default_actions": [
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+ "in_tree_config": "config/mozharness/marionette.py",
+ "download_minidump_stackwalk": True,
+ "download_symbols": "ondemand",
+
+ "suite_definitions": {
+ "media-tests": {
+ "options": [],
+ },
+ "media-youtube-tests": {
+ "options": [
+ "%(test_manifest)s"
+ ],
+ },
+ },
+}
diff --git a/testing/mozharness/configs/mediatests/jenkins_config.py b/testing/mozharness/configs/mediatests/jenkins_config.py
new file mode 100755
index 000000000..52de7221d
--- /dev/null
+++ b/testing/mozharness/configs/mediatests/jenkins_config.py
@@ -0,0 +1,48 @@
+# Default configuration as used by Mozmill CI (Jenkins)
+
+import os
+import platform
+import sys
+
+import mozharness
+
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+config = {
+ # PIP
+ 'find_links': ['http://pypi.pub.build.mozilla.org/pub'],
+ 'pip_index': False,
+
+ # mozcrash support
+ 'download_minidump_stackwalk': True,
+ 'download_symbols': 'ondemand',
+ 'download_tooltool': True,
+
+ # Default test suite
+ 'test_suite': 'media-tests',
+
+ 'suite_definitions': {
+ 'media-tests': {
+ 'options': [],
+ },
+ 'media-youtube-tests': {
+ 'options': [
+ '%(test_manifest)s'
+ ],
+ },
+ },
+
+ 'default_actions': [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+
+}
+
diff --git a/testing/mozharness/configs/mediatests/taskcluster_posix_config.py b/testing/mozharness/configs/mediatests/taskcluster_posix_config.py
new file mode 100644
index 000000000..d02effa3d
--- /dev/null
+++ b/testing/mozharness/configs/mediatests/taskcluster_posix_config.py
@@ -0,0 +1,47 @@
+import os
+import mozharness
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+config = {
+ # Python env
+ "virtualenv_path": 'venv',
+ "exes": {
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+
+ # PIP
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ # mozcrash support
+ "download_minidump_stackwalk": True,
+ "download_symbols": "ondemand",
+
+ "default_actions": [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+
+ "suite_definitions": {
+ "media-tests": {
+ "options": [],
+ },
+ "media-youtube-tests": {
+ "options": [
+ "%(test_manifest)s"
+ ],
+ },
+ },
+}
diff --git a/testing/mozharness/configs/mediatests/taskcluster_windows_config.py b/testing/mozharness/configs/mediatests/taskcluster_windows_config.py
new file mode 100644
index 000000000..85bf8b525
--- /dev/null
+++ b/testing/mozharness/configs/mediatests/taskcluster_windows_config.py
@@ -0,0 +1,50 @@
+import os
+import sys
+import mozharness
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+config = {
+ "virtualenv_python_dll": os.path.join(os.path.dirname(sys.executable), 'python27.dll'),
+ "virtualenv_path": 'venv',
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(os.path.dirname(sys.executable), 'Lib', 'site-packages', 'virtualenv.py')
+ ],
+ 'mozinstall': ['build/venv/scripts/python', 'build/venv/scripts/mozinstall-script.py'],
+ 'tooltool.py': [sys.executable, os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')],
+ 'hg': os.path.join(os.environ['PROGRAMFILES'], 'Mercurial', 'hg')
+ },
+ "proxxy": {},
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+
+ "download_minidump_stackwalk": True,
+ "download_symbols": "ondemand",
+
+ "default_actions": [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+
+ "suite_definitions": {
+ "media-tests": {
+ "options": [],
+ },
+ "media-youtube-tests": {
+ "options": [
+ "%(test_manifest)s"
+ ],
+ },
+ },
+}
diff --git a/testing/mozharness/configs/merge_day/aurora_to_beta.py b/testing/mozharness/configs/merge_day/aurora_to_beta.py
new file mode 100644
index 000000000..dc1fc4c83
--- /dev/null
+++ b/testing/mozharness/configs/merge_day/aurora_to_beta.py
@@ -0,0 +1,83 @@
+import os
+
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+
+config = {
+ "log_name": "aurora_to_beta",
+ "version_files": [
+ {"file": "browser/config/version.txt", "suffix": ""},
+ {"file": "browser/config/version_display.txt", "suffix": "b1"},
+ {"file": "config/milestone.txt", "suffix": ""},
+ ],
+ "replacements": [
+ # File, from, to
+ ("{}/{}".format(d, f),
+ "ac_add_options --with-branding=mobile/android/branding/aurora",
+ "ac_add_options --with-branding=mobile/android/branding/beta")
+ for d in ["mobile/android/config/mozconfigs/android-api-15/",
+ "mobile/android/config/mozconfigs/android-x86/"]
+ for f in ["debug", "nightly", "l10n-nightly"]
+ ] + [
+ # File, from, to
+ ("{}/{}".format(d, f),
+ "ac_add_options --with-branding=browser/branding/aurora",
+ "ac_add_options --with-branding=browser/branding/nightly")
+ for d in ["browser/config/mozconfigs/linux32",
+ "browser/config/mozconfigs/linux64",
+ "browser/config/mozconfigs/win32",
+ "browser/config/mozconfigs/win64",
+ "browser/config/mozconfigs/macosx64"]
+ for f in ["debug", "nightly"]
+ ] + [
+ # File, from, to
+ (f, "ac_add_options --with-branding=browser/branding/aurora",
+ "ac_add_options --enable-official-branding")
+ for f in ["browser/config/mozconfigs/linux32/l10n-mozconfig",
+ "browser/config/mozconfigs/linux64/l10n-mozconfig",
+ "browser/config/mozconfigs/win32/l10n-mozconfig",
+ "browser/config/mozconfigs/win64/l10n-mozconfig",
+ "browser/config/mozconfigs/macosx-universal/l10n-mozconfig",
+ "browser/config/mozconfigs/macosx64/l10n-mozconfig"]
+ ] + [
+ ("browser/config/mozconfigs/macosx-universal/nightly",
+ "ac_add_options --with-branding=browser/branding/aurora",
+ "ac_add_options --with-branding=browser/branding/nightly"),
+ ("browser/confvars.sh",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-aurora",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release"),
+ ("browser/confvars.sh",
+ "MAR_CHANNEL_ID=firefox-mozilla-aurora",
+ "MAR_CHANNEL_ID=firefox-mozilla-beta"),
+ ("browser/config/mozconfigs/whitelist",
+ "ac_add_options --with-branding=browser/branding/aurora",
+ "ac_add_options --with-branding=browser/branding/nightly"),
+ ] + [
+ ("build/mozconfig.common",
+ "MOZ_REQUIRE_SIGNING=${MOZ_REQUIRE_SIGNING-0}",
+ "MOZ_REQUIRE_SIGNING=${MOZ_REQUIRE_SIGNING-1}"),
+ ("build/mozconfig.common",
+ "# Disable enforcing that add-ons are signed by the trusted root",
+ "# Enable enforcing that add-ons are signed by the trusted root")
+ ],
+
+ "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'),
+ # "hg_share_base": None,
+ "tools_repo_url": "https://hg.mozilla.org/build/tools",
+ "tools_repo_branch": "default",
+ "from_repo_url": "ssh://hg.mozilla.org/releases/mozilla-aurora",
+ "to_repo_url": "ssh://hg.mozilla.org/releases/mozilla-beta",
+
+ "base_tag": "FIREFOX_BETA_%(major_version)s_BASE",
+ "end_tag": "FIREFOX_BETA_%(major_version)s_END",
+
+ "migration_behavior": "aurora_to_beta",
+
+ "virtualenv_modules": [
+ "requests==2.8.1",
+ ],
+
+ "post_merge_builders": [],
+ "post_merge_nightly_branches": [
+ # No nightlies on mozilla-beta
+ ],
+}
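
Each replacements entry is a (path, old, new) triple, and the list comprehensions above fan a single substitution out across many mozconfigs. A sketch of the apply step, assuming plain whole-file text substitution relative to the checkout (failing loudly when a pattern is missing, so merge-day drift is caught):

    import os

    def apply_replacements(repo_dir, replacements):
        """Apply (path, old, new) triples as plain text substitutions."""
        for rel_path, old, new in replacements:
            path = os.path.join(repo_dir, rel_path)
            with open(path) as f:
                text = f.read()
            if old not in text:
                raise ValueError('pattern not found in %s: %r' % (rel_path, old))
            with open(path, 'w') as f:
                f.write(text.replace(old, new))
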
diff --git a/testing/mozharness/configs/merge_day/beta_to_release.py b/testing/mozharness/configs/merge_day/beta_to_release.py
new file mode 100644
index 000000000..0316272bf
--- /dev/null
+++ b/testing/mozharness/configs/merge_day/beta_to_release.py
@@ -0,0 +1,53 @@
+import os
+
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+
+config = {
+ "log_name": "beta_to_release",
+ "copy_files": [
+ {
+ "src": "browser/config/version.txt",
+ "dst": "browser/config/version_display.txt",
+ },
+ ],
+ "replacements": [
+ # File, from, to
+ ("{}/{}".format(d, f),
+ "ac_add_options --with-branding=mobile/android/branding/beta",
+ "ac_add_options --with-branding=mobile/android/branding/official")
+ for d in ["mobile/android/config/mozconfigs/android-api-15/",
+ "mobile/android/config/mozconfigs/android-x86/"]
+ for f in ["debug", "nightly", "l10n-nightly", "l10n-release", "release"]
+ ] + [
+ # File, from, to
+ ("browser/confvars.sh",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release"),
+ ("browser/confvars.sh",
+ "MAR_CHANNEL_ID=firefox-mozilla-beta",
+ "MAR_CHANNEL_ID=firefox-mozilla-release"),
+ ],
+
+ "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'),
+ # "hg_share_base": None,
+ "tools_repo_url": "https://hg.mozilla.org/build/tools",
+ "tools_repo_branch": "default",
+ "from_repo_url": "ssh://hg.mozilla.org/releases/mozilla-beta",
+ "to_repo_url": "ssh://hg.mozilla.org/releases/mozilla-release",
+
+ "base_tag": "FIREFOX_RELEASE_%(major_version)s_BASE",
+ "end_tag": "FIREFOX_RELEASE_%(major_version)s_END",
+
+ "migration_behavior": "beta_to_release",
+ "require_remove_locales": False,
+ "pull_all_branches": True,
+
+ "virtualenv_modules": [
+ "requests==2.8.1",
+ ],
+
+ "post_merge_builders": [],
+ "post_merge_nightly_branches": [
+ # No nightlies on mozilla-release
+ ],
+}
diff --git a/testing/mozharness/configs/merge_day/bump_esr.py b/testing/mozharness/configs/merge_day/bump_esr.py
new file mode 100644
index 000000000..48ab2e9de
--- /dev/null
+++ b/testing/mozharness/configs/merge_day/bump_esr.py
@@ -0,0 +1,24 @@
+import os
+
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+config = {
+ "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'),
+ "log_name": "bump_esr",
+ "version_files": [
+ {"file": "browser/config/version.txt", "suffix": ""},
+ {"file": "browser/config/version_display.txt", "suffix": ""},
+ {"file": "config/milestone.txt", "suffix": ""},
+ ],
+ "tools_repo_url": "https://hg.mozilla.org/build/tools",
+ "tools_repo_branch": "default",
+ "to_repo_url": "ssh://hg.mozilla.org/releases/mozilla-esr52",
+
+ "migration_behavior": "bump_second_digit",
+ "require_remove_locales": False,
+ "requires_head_merge": False,
+ "default_actions": [
+ "clean-repos",
+ "pull",
+ "bump_second_digit"
+ ],
+}
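
The bump_second_digit behavior moves an ESR version such as 52.1.0 to 52.2.0 across the version_files listed above. As a sketch, assuming plain dotted numeric versions (the real script presumably also copes with suffixes like 'esr' in display versions):

    def bump_second_digit(version):
        """'52.1.0' -> '52.2.0'; later fields are left as-is."""
        parts = version.strip().split('.')
        parts[1] = str(int(parts[1]) + 1)
        return '.'.join(parts)

    assert bump_second_digit('52.1.0') == '52.2.0'
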
diff --git a/testing/mozharness/configs/merge_day/central_to_aurora.py b/testing/mozharness/configs/merge_day/central_to_aurora.py
new file mode 100644
index 000000000..36347f667
--- /dev/null
+++ b/testing/mozharness/configs/merge_day/central_to_aurora.py
@@ -0,0 +1,100 @@
+import os
+
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+config = {
+ "log_name": "central_to_aurora",
+ "version_files": [
+ {"file": "browser/config/version.txt", "suffix": ""},
+ {"file": "browser/config/version_display.txt", "suffix": ""},
+ {"file": "config/milestone.txt", "suffix": ""},
+ ],
+ "replacements": [
+ # File, from, to
+ ("{}/{}".format(d, f),
+ "ac_add_options --with-branding=mobile/android/branding/nightly",
+ "ac_add_options --with-branding=mobile/android/branding/aurora")
+ for d in ["mobile/android/config/mozconfigs/android-api-15/",
+ "mobile/android/config/mozconfigs/android-x86/"]
+ for f in ["debug", "nightly", "l10n-nightly"]
+ ] + [
+ # File, from, to
+ ("{}/{}".format(d, f),
+ "ac_add_options --with-branding=browser/branding/nightly",
+ "ac_add_options --with-branding=browser/branding/aurora")
+ for d in ["browser/config/mozconfigs/linux32",
+ "browser/config/mozconfigs/linux64",
+ "browser/config/mozconfigs/win32",
+ "browser/config/mozconfigs/win64",
+ "browser/config/mozconfigs/macosx64"]
+ for f in ["debug", "nightly", "l10n-mozconfig"]
+ ] + [
+ # File, from, to
+ ("{}/l10n-nightly".format(d),
+ "ac_add_options --with-l10n-base=../../l10n-central",
+ "ac_add_options --with-l10n-base=..")
+ for d in ["mobile/android/config/mozconfigs/android-api-15/",
+ "mobile/android/config/mozconfigs/android-x86/"]
+ ] + [
+ # File, from, to
+ (f, "ac_add_options --enable-profiling", "") for f in
+ ["mobile/android/config/mozconfigs/android-api-15/nightly",
+ "mobile/android/config/mozconfigs/android-x86/nightly",
+ "browser/config/mozconfigs/linux32/nightly",
+ "browser/config/mozconfigs/linux64/nightly",
+ "browser/config/mozconfigs/macosx-universal/nightly",
+ "browser/config/mozconfigs/win32/nightly",
+ "browser/config/mozconfigs/win64/nightly"]
+ ] + [
+ # File, from, to
+ ("browser/confvars.sh",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-central",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-aurora"),
+ ("browser/confvars.sh",
+ "MAR_CHANNEL_ID=firefox-mozilla-central",
+ "MAR_CHANNEL_ID=firefox-mozilla-aurora"),
+ ("browser/config/mozconfigs/macosx-universal/nightly",
+ "ac_add_options --with-branding=browser/branding/nightly",
+ "ac_add_options --with-branding=browser/branding/aurora"),
+ ("browser/config/mozconfigs/macosx-universal/l10n-mozconfig",
+ "ac_add_options --with-branding=browser/branding/nightly",
+ "ac_add_options --with-branding=browser/branding/aurora"),
+ ("browser/config/mozconfigs/whitelist",
+ "ac_add_options --with-branding=browser/branding/nightly",
+ "ac_add_options --with-branding=browser/branding/aurora"),
+ ],
+ "locale_files": [
+ "browser/locales/shipped-locales",
+ "browser/locales/all-locales",
+ "mobile/android/locales/maemo-locales",
+ "mobile/android/locales/all-locales"
+ ],
+
+ "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'),
+ # "hg_share_base": None,
+ "tools_repo_url": "https://hg.mozilla.org/build/tools",
+ "tools_repo_branch": "default",
+ "from_repo_url": "ssh://hg.mozilla.org/mozilla-central",
+ "to_repo_url": "ssh://hg.mozilla.org/releases/mozilla-aurora",
+
+ "base_tag": "FIREFOX_AURORA_%(major_version)s_BASE",
+ "end_tag": "FIREFOX_AURORA_%(major_version)s_END",
+
+ "migration_behavior": "central_to_aurora",
+
+ "balrog_rules_to_lock": [
+ 8, # Fennec aurora channel
+ 10, # Firefox aurora channel
+ 18, # MetroFirefox aurora channel
+ ],
+ "balrog_credentials_file": "oauth.txt",
+
+ "virtualenv_modules": [
+ "requests==2.8.1",
+ ],
+
+ "post_merge_builders": [],
+ "post_merge_nightly_branches": [
+ "mozilla-central",
+ "mozilla-aurora",
+ ],
+}
diff --git a/testing/mozharness/configs/merge_day/release_to_esr.py b/testing/mozharness/configs/merge_day/release_to_esr.py
new file mode 100644
index 000000000..358c583da
--- /dev/null
+++ b/testing/mozharness/configs/merge_day/release_to_esr.py
@@ -0,0 +1,54 @@
+import os
+
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+NEW_ESR_REPO = "ssh://hg.mozilla.org/releases/mozilla-esr52"
+OLD_ESR_REPO = "https://hg.mozilla.org/releases/mozilla-esr45"
+OLD_ESR_CHANGESET = "d2d75f526882"
+
+config = {
+ "log_name": "relese_to_esr",
+ "version_files": [
+ {"file": "browser/config/version.txt", "suffix": ""},
+ {"file": "browser/config/version_display.txt", "suffix": ""},
+ {"file": "config/milestone.txt", "suffix": ""},
+ ],
+ "replacements": [
+ # File, from, to
+ ("browser/confvars.sh",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-esr"),
+ ("browser/confvars.sh",
+ "MAR_CHANNEL_ID=firefox-mozilla-release",
+ "MAR_CHANNEL_ID=firefox-mozilla-esr"),
+ ("build/mozconfig.common",
+ "# Enable checking that add-ons are signed by the trusted root",
+ "# Disable checking that add-ons are signed by the trusted root"),
+ ("build/mozconfig.common",
+ "MOZ_ADDON_SIGNING=${MOZ_ADDON_SIGNING-1}",
+ "MOZ_ADDON_SIGNING=${MOZ_ADDON_SIGNING-0}"),
+ ("build/mozconfig.common",
+ "# Enable enforcing that add-ons are signed by the trusted root",
+ "# Disable enforcing that add-ons are signed by the trusted root"),
+ ("build/mozconfig.common",
+ "MOZ_REQUIRE_SIGNING=${MOZ_REQUIRE_SIGNING-1}",
+ "MOZ_REQUIRE_SIGNING=${MOZ_REQUIRE_SIGNING-0}"),
+ ],
+ "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'),
+ # "hg_share_base": None,
+ "tools_repo_url": "https://hg.mozilla.org/build/tools",
+ "tools_repo_branch": "default",
+ "from_repo_url": "ssh://hg.mozilla.org/releases/mozilla-release",
+ "to_repo_url": NEW_ESR_REPO,
+
+ "base_tag": "FIREFOX_ESR_%(major_version)s_BASE",
+ "end_tag": "FIREFOX_ESR_%(major_version)s_END",
+
+ "migration_behavior": "release_to_esr",
+ "require_remove_locales": False,
+ "transplant_patches": [
+ {"repo": OLD_ESR_REPO,
+ "changeset": OLD_ESR_CHANGESET},
+ ],
+ "requires_head_merge": False,
+ "pull_all_branches": True,
+}
diff --git a/testing/mozharness/configs/merge_day/staging_beta_migration.py b/testing/mozharness/configs/merge_day/staging_beta_migration.py
new file mode 100644
index 000000000..9b6ac198e
--- /dev/null
+++ b/testing/mozharness/configs/merge_day/staging_beta_migration.py
@@ -0,0 +1,22 @@
+# Use this script in conjunction with aurora_to_beta.py.
+# mozharness/scripts/merge_day/gecko_migration.py -c \
+# mozharness/configs/merge_day/aurora_to_beta.py -c \
+# mozharness/configs/merge_day/staging_beta_migration.py ...
+import os
+
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+
+config = {
+ "log_name": "staging_beta",
+
+ "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'),
+ "tools_repo_url": "https://hg.mozilla.org/build/tools",
+ "tools_repo_branch": "default",
+ "from_repo_url": "ssh://hg.mozilla.org/releases/mozilla-aurora",
+ "to_repo_url": "ssh://hg.mozilla.org/users/stage-ffxbld/mozilla-beta",
+
+ "base_tag": "FIREFOX_BETA_%(major_version)s_BASE",
+ "end_tag": "FIREFOX_BETA_%(major_version)s_END",
+
+ "migration_behavior": "aurora_to_beta",
+}
diff --git a/testing/mozharness/configs/multi_locale/android-mozharness-build.json b/testing/mozharness/configs/multi_locale/android-mozharness-build.json
new file mode 100644
index 000000000..b28f5c015
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/android-mozharness-build.json
@@ -0,0 +1,5 @@
+{
+ "work_dir": "build",
+ "locales_file": "src/mobile/android/locales/maemo-locales",
+ "mozilla_dir": "src"
+}
diff --git a/testing/mozharness/configs/multi_locale/ash_android-x86.json b/testing/mozharness/configs/multi_locale/ash_android-x86.json
new file mode 100644
index 000000000..6a37ce24f
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/ash_android-x86.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/projects/ash",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "l10n-central",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/ash_android.json b/testing/mozharness/configs/multi_locale/ash_android.json
new file mode 100644
index 000000000..831d4f7c3
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/ash_android.json
@@ -0,0 +1,27 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/projects/ash",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "l10n-central",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/b2g_linux32.py b/testing/mozharness/configs/multi_locale/b2g_linux32.py
new file mode 100644
index 000000000..8403f7553
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/b2g_linux32.py
@@ -0,0 +1,2 @@
+config = {
+}
diff --git a/testing/mozharness/configs/multi_locale/b2g_linux64.py b/testing/mozharness/configs/multi_locale/b2g_linux64.py
new file mode 100644
index 000000000..8403f7553
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/b2g_linux64.py
@@ -0,0 +1,2 @@
+config = {
+}
diff --git a/testing/mozharness/configs/multi_locale/b2g_macosx64.py b/testing/mozharness/configs/multi_locale/b2g_macosx64.py
new file mode 100644
index 000000000..8403f7553
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/b2g_macosx64.py
@@ -0,0 +1,2 @@
+config = {
+}
diff --git a/testing/mozharness/configs/multi_locale/b2g_win32.py b/testing/mozharness/configs/multi_locale/b2g_win32.py
new file mode 100644
index 000000000..a82ce7155
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/b2g_win32.py
@@ -0,0 +1,8 @@
+import sys
+
+config = {
+ "exes": {
+ "hg": "c:/mozilla-build/hg/hg",
+ "make": [sys.executable, "%(abs_work_dir)s/build/build/pymake/make.py"],
+ },
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-aurora_android-armv6.json b/testing/mozharness/configs/multi_locale/mozilla-aurora_android-armv6.json
new file mode 100644
index 000000000..dc50707cf
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-aurora_android-armv6.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-aurora",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-aurora",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-aurora",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-aurora_android-x86.json b/testing/mozharness/configs/multi_locale/mozilla-aurora_android-x86.json
new file mode 100644
index 000000000..bd5f8b6ba
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-aurora_android-x86.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-aurora",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-aurora",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-aurora",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-aurora_android.json b/testing/mozharness/configs/multi_locale/mozilla-aurora_android.json
new file mode 100644
index 000000000..1cc38e35b
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-aurora_android.json
@@ -0,0 +1,27 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-aurora",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-aurora",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-aurora",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-beta_android-armv6.json b/testing/mozharness/configs/multi_locale/mozilla-beta_android-armv6.json
new file mode 100644
index 000000000..4cffd4807
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-beta_android-armv6.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-beta_android-x86.json b/testing/mozharness/configs/multi_locale/mozilla-beta_android-x86.json
new file mode 100644
index 000000000..233e740aa
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-beta_android-x86.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-beta_android.json b/testing/mozharness/configs/multi_locale/mozilla-beta_android.json
new file mode 100644
index 000000000..c9d0e4d6b
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-beta_android.json
@@ -0,0 +1,27 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-central_android-armv6.json b/testing/mozharness/configs/multi_locale/mozilla-central_android-armv6.json
new file mode 100644
index 000000000..1b27a017e
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-central_android-armv6.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "l10n-central",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-central_android-x86.json b/testing/mozharness/configs/multi_locale/mozilla-central_android-x86.json
new file mode 100644
index 000000000..0873a0198
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-central_android-x86.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "l10n-central",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-central_android.json b/testing/mozharness/configs/multi_locale/mozilla-central_android.json
new file mode 100644
index 000000000..67d195242
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-central_android.json
@@ -0,0 +1,27 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "l10n-central",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-release_android-armv6.json b/testing/mozharness/configs/multi_locale/mozilla-release_android-armv6.json
new file mode 100644
index 000000000..fbfff0d7c
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-release_android-armv6.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-release_android-x86.json b/testing/mozharness/configs/multi_locale/mozilla-release_android-x86.json
new file mode 100644
index 000000000..96aa60cef
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-release_android-x86.json
@@ -0,0 +1,28 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/nightly"
+}
diff --git a/testing/mozharness/configs/multi_locale/mozilla-release_android.json b/testing/mozharness/configs/multi_locale/mozilla-release_android.json
new file mode 100644
index 000000000..9a737f9a9
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/mozilla-release_android.json
@@ -0,0 +1,27 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/mobile/android/locales/maemo-locales",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+ "hg_l10n_tag": "default",
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/release_mozilla-beta_android-armv6.json b/testing/mozharness/configs/multi_locale/release_mozilla-beta_android-armv6.json
new file mode 100644
index 000000000..beef77284
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/release_mozilla-beta_android-armv6.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+ "required_config_vars": ["tag_override"],
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/release_mozilla-beta_android-x86.json b/testing/mozharness/configs/multi_locale/release_mozilla-beta_android-x86.json
new file mode 100644
index 000000000..4f7144b40
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/release_mozilla-beta_android-x86.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+ "required_config_vars": ["tag_override"],
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/release_mozilla-beta_android.json b/testing/mozharness/configs/multi_locale/release_mozilla-beta_android.json
new file mode 100644
index 000000000..2fa9c06fd
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/release_mozilla-beta_android.json
@@ -0,0 +1,33 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+ "required_config_vars": ["tag_override"],
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/release_mozilla-release_android-armv6.json b/testing/mozharness/configs/multi_locale/release_mozilla-release_android-armv6.json
new file mode 100644
index 000000000..57406c739
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/release_mozilla-release_android-armv6.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+ "required_config_vars": ["tag_override"],
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/release_mozilla-release_android-x86.json b/testing/mozharness/configs/multi_locale/release_mozilla-release_android-x86.json
new file mode 100644
index 000000000..24075237e
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/release_mozilla-release_android-x86.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+ "required_config_vars": ["tag_override"],
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/release_mozilla-release_android.json b/testing/mozharness/configs/multi_locale/release_mozilla-release_android.json
new file mode 100644
index 000000000..e295a13eb
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/release_mozilla-release_android.json
@@ -0,0 +1,33 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "production",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+ "required_config_vars": ["tag_override"],
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-armv6.json b/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-armv6.json
new file mode 100644
index 000000000..032e04ff7
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-armv6.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s",
+ "required_config_vars": ["tag_override", "user_repo_override"],
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-x86.json b/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-x86.json
new file mode 100644
index 000000000..a055b0ab9
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android-x86.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s",
+ "required_config_vars": ["tag_override", "user_repo_override"],
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android.json b/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android.json
new file mode 100644
index 000000000..1447ffd91
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/staging_release_mozilla-beta_android.json
@@ -0,0 +1,33 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-beta",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s",
+ "required_config_vars": ["tag_override", "user_repo_override"],
+ "l10n_dir": "mozilla-beta",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
diff --git a/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-armv6.json b/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-armv6.json
new file mode 100644
index 000000000..5e2f26dc1
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-armv6.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s",
+ "required_config_vars": ["tag_override", "user_repo_override"],
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-armv6/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-x86.json b/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-x86.json
new file mode 100644
index 000000000..68feec852
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android-x86.json
@@ -0,0 +1,34 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s",
+ "required_config_vars": ["tag_override", "user_repo_override"],
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build",
+ "mozconfig": "build/mobile/android/config/mozconfigs/android-x86/release"
+}
diff --git a/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android.json b/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android.json
new file mode 100644
index 000000000..4ed17c487
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/staging_release_mozilla-release_android.json
@@ -0,0 +1,33 @@
+{
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": "obj-firefox",
+ "locales_file": "build/configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_platform": "android-multilocale",
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-release",
+ "branch": "default",
+ "dest": "build"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ },{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "l10n_repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "build/configs"
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s",
+ "required_config_vars": ["tag_override", "user_repo_override"],
+ "l10n_dir": "mozilla-release",
+ "merge_locales": true,
+ "mozilla_dir": "build"
+}
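
A note on the staging variants above: every repository URL is parameterized on %(user_repo_override)s, and required_config_vars names the substitutions a caller must supply before the templates can be expanded. Below is a minimal sketch of that validation step, assuming a hypothetical check_required_vars helper; the real enforcement lives inside mozharness itself.

    def check_required_vars(config, supplied):
        # Fail early if any variable named in required_config_vars was
        # not provided by the caller (e.g. on the command line).
        missing = [var for var in config.get("required_config_vars", [])
                   if var not in supplied]
        if missing:
            raise ValueError("missing required config vars: %s" % ", ".join(missing))

    try:
        check_required_vars(
            {"required_config_vars": ["tag_override", "user_repo_override"]},
            {"user_repo_override": "users/example"},
        )
    except ValueError as e:
        print(e)  # missing required config vars: tag_override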
diff --git a/testing/mozharness/configs/multi_locale/standalone_mozilla-central.py b/testing/mozharness/configs/multi_locale/standalone_mozilla-central.py
new file mode 100644
index 000000000..36ad4de58
--- /dev/null
+++ b/testing/mozharness/configs/multi_locale/standalone_mozilla-central.py
@@ -0,0 +1,49 @@
+import os
+# The name of the directory we'll pull our source into.
+BUILD_DIR = "mozilla-central"
+# This is everything that comes after https://hg.mozilla.org/
+# e.g. "releases/mozilla-aurora"
+REPO_PATH = "mozilla-central"
+# This is where the l10n repos live (everything after https://hg.mozilla.org/).
+# For mozilla-central, that's "l10n-central".
+# For mozilla-aurora, it's "releases/l10n/mozilla-aurora".
+L10N_REPO_PATH = "l10n-central"
+# Currently this is assumed to be a subdirectory of your build dir
+OBJDIR = "objdir-droid"
+# Set this to mobile/xul for XUL Fennec
+ANDROID_DIR = "mobile/android"
+# Absolute path to your mozconfig.
+# By default it looks at "./mozconfig"
+MOZCONFIG = os.path.join(os.getcwd(), "mozconfig")
+
+config = {
+ "work_dir": ".",
+ "log_name": "multilocale",
+ "objdir": OBJDIR,
+ "locales_file": "%s/%s/locales/maemo-locales" % (BUILD_DIR, ANDROID_DIR),
+ "locales_dir": "%s/locales" % ANDROID_DIR,
+ "ignore_locales": ["en-US", "multi"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%s" % REPO_PATH,
+ "branch": "default",
+ "dest": BUILD_DIR,
+ }],
+ "vcs_share_base": "/builds/hg-shared",
+ "l10n_repos": [],
+ "hg_l10n_base": "https://hg.mozilla.org/%s" % L10N_REPO_PATH,
+ "hg_l10n_tag": "default",
+ "l10n_dir": "l10n",
+ "merge_locales": True,
+ "mozilla_dir": BUILD_DIR,
+ "mozconfig": MOZCONFIG,
+ "default_actions": [
+ "pull-locale-source",
+ "build",
+ "package-en-US",
+ "backup-objdir",
+ "restore-objdir",
+ "add-locales",
+ "package-multi",
+ "summary",
+ ],
+}
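
A note on how configs like standalone_mozilla-central.py above are consumed: mozharness evaluates a .py config and reads its top-level config dict, while the .json configs in this directory are parsed as plain JSON. The sketch below illustrates that convention with a hypothetical load_py_config helper; it is a simplified stand-in under that assumption, not the actual mozharness loader.

    def load_py_config(path):
        # Evaluate a mozharness-style .py config file and return its
        # top-level "config" dict; .json configs load with json.load() instead.
        namespace = {}
        with open(path) as f:
            exec(compile(f.read(), path, "exec"), namespace)
        return namespace["config"]

    # Assuming the file above is saved alongside this script:
    config = load_py_config("standalone_mozilla-central.py")
    assert config["mozilla_dir"] == "mozilla-central"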
diff --git a/testing/mozharness/configs/partner_repacks/release_mozilla-esr52_desktop.py b/testing/mozharness/configs/partner_repacks/release_mozilla-esr52_desktop.py
new file mode 100644
index 000000000..604407e6a
--- /dev/null
+++ b/testing/mozharness/configs/partner_repacks/release_mozilla-esr52_desktop.py
@@ -0,0 +1,6 @@
+config = {
+ "appName": "Firefox",
+ "log_name": "partner_repack",
+ "repack_manifests_url": "https://github.com/mozilla-partners/mozilla-sha1-manifest",
+ "repo_file": "https://raw.githubusercontent.com/mozilla/git-repo/master/repo",
+}
diff --git a/testing/mozharness/configs/partner_repacks/release_mozilla-release_android.py b/testing/mozharness/configs/partner_repacks/release_mozilla-release_android.py
new file mode 100644
index 000000000..6978df8a2
--- /dev/null
+++ b/testing/mozharness/configs/partner_repacks/release_mozilla-release_android.py
@@ -0,0 +1,47 @@
+FTP_SERVER = "stage.mozilla.org"
+FTP_USER = "ffxbld"
+FTP_SSH_KEY = "~/.ssh/ffxbld_rsa"
+FTP_UPLOAD_BASE_DIR = "/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d"
+DOWNLOAD_BASE_URL = "http://%s%s" % (FTP_SERVER, FTP_UPLOAD_BASE_DIR)
+APK_BASE_NAME = "fennec-%(version)s.%(locale)s.android-arm.apk"
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+KEYSTORE = "/home/cltsign/.android/android-release.keystore"
+KEY_ALIAS = "release"
+
+config = {
+ "log_name": "partner_repack",
+ "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-release.json",
+ "additional_locales": ['en-US'],
+ "platforms": ["android"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ }],
+ "vcs_share_base": HG_SHARE_BASE_DIR,
+ "ftp_upload_base_dir": FTP_UPLOAD_BASE_DIR,
+ "ftp_ssh_key": FTP_SSH_KEY,
+ "ftp_user": FTP_USER,
+ "ftp_server": FTP_SERVER,
+ "installer_base_names": {
+ "android": APK_BASE_NAME,
+ },
+ "partner_config": {
+ "google-play": {},
+ },
+ "download_unsigned_base_subdir": "unsigned/%(platform)s/%(locale)s",
+ "download_base_url": DOWNLOAD_BASE_URL,
+
+ "release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-release.py",
+
+ "default_actions": ["clobber", "pull", "download", "repack", "upload-unsigned-bits", "summary"],
+
+ # signing (optional)
+ "keystore": KEYSTORE,
+ "key_alias": KEY_ALIAS,
+ "exes": {
+ # This path doesn't exist, and this file probably doesn't work;
+ # the jarsigner entry is commented out to avoid confusion.
+# "jarsigner": "/tools/jdk-1.6.0_17/bin/jarsigner",
+ "zipalign": "/tools/android-sdk-r8/tools/zipalign",
+ },
+}
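
The %(version)s, %(buildnum)d and %(locale)s fields in FTP_UPLOAD_BASE_DIR and APK_BASE_NAME above are ordinary Python %-formatting placeholders that the repack script fills in at runtime. A quick illustration, using made-up values rather than any real release:

    FTP_UPLOAD_BASE_DIR = "/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d"
    APK_BASE_NAME = "fennec-%(version)s.%(locale)s.android-arm.apk"

    subs = {"version": "55.0b1", "buildnum": 2, "locale": "en-US"}
    print(FTP_UPLOAD_BASE_DIR % subs)  # /pub/mozilla.org/mobile/candidates/55.0b1-candidates/build2
    print(APK_BASE_NAME % subs)        # fennec-55.0b1.en-US.android-arm.apk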
diff --git a/testing/mozharness/configs/partner_repacks/release_mozilla-release_desktop.py b/testing/mozharness/configs/partner_repacks/release_mozilla-release_desktop.py
new file mode 100644
index 000000000..229c2bb44
--- /dev/null
+++ b/testing/mozharness/configs/partner_repacks/release_mozilla-release_desktop.py
@@ -0,0 +1,6 @@
+config = {
+ "appName": "Firefox",
+ "log_name": "partner_repack",
+ "repack_manifests_url": "git@github.com:mozilla-partners/repack-manifests.git",
+ "repo_file": "https://raw.githubusercontent.com/mozilla/git-repo/master/repo",
+}
diff --git a/testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_android.py b/testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_android.py
new file mode 100644
index 000000000..ffb2392b6
--- /dev/null
+++ b/testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_android.py
@@ -0,0 +1,52 @@
+FTP_SERVER = "dev-stage01.srv.releng.scl3.mozilla.com"
+FTP_USER = "ffxbld"
+FTP_SSH_KEY = "~/.ssh/ffxbld_rsa"
+FTP_UPLOAD_BASE_DIR = "/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d"
+#DOWNLOAD_BASE_URL = "http://%s%s" % (FTP_SERVER, FTP_UPLOAD_BASE_DIR)
+DOWNLOAD_BASE_URL = "https://ftp-ssl.mozilla.org/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d"
+#DOWNLOAD_BASE_URL = "http://dev-stage01.build.mozilla.org/pub/mozilla.org/mobile/candidates/11.0b1-candidates/build1/"
+APK_BASE_NAME = "fennec-%(version)s.%(locale)s.android-arm.apk"
+#APK_BASE_NAME = "fennec-11.0b1.%(locale)s.android-arm.apk"
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+#KEYSTORE = "/home/cltsign/.android/android-release.keystore"
+KEYSTORE = "/home/cltbld/.android/android.keystore"
+#KEY_ALIAS = "release"
+KEY_ALIAS = "nightly"
+
+config = {
+ "log_name": "partner_repack",
+ "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-release.json",
+ "additional_locales": ['en-US'],
+ "platforms": ["android"],
+ "repos": [{
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ }],
+ "vcs_share_base": HG_SHARE_BASE_DIR,
+ "ftp_upload_base_dir": FTP_UPLOAD_BASE_DIR,
+ "ftp_ssh_key": FTP_SSH_KEY,
+ "ftp_user": FTP_USER,
+ "ftp_server": FTP_SERVER,
+ "installer_base_names": {
+ "android": APK_BASE_NAME,
+ },
+ "partner_config": {
+ "google-play": {},
+ },
+ "download_unsigned_base_subdir": "unsigned/%(platform)s/%(locale)s",
+ "download_base_url": DOWNLOAD_BASE_URL,
+
+ "release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-release.py",
+
+ "default_actions": ["clobber", "pull", "download", "repack", "upload-unsigned-bits", "summary"],
+
+ # signing (optional)
+ "keystore": KEYSTORE,
+ "key_alias": KEY_ALIAS,
+ "exes": {
+ # This path doesn't exist, and this file probably doesn't work;
+ # the jarsigner entry is commented out to avoid confusion.
+# "jarsigner": "/tools/jdk-1.6.0_17/bin/jarsigner",
+ "zipalign": "/tools/android-sdk-r8/tools/zipalign",
+ },
+}
diff --git a/testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_desktop.py b/testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_desktop.py
new file mode 100644
index 000000000..229c2bb44
--- /dev/null
+++ b/testing/mozharness/configs/partner_repacks/staging_release_mozilla-release_desktop.py
@@ -0,0 +1,6 @@
+config = {
+ "appName": "Firefox",
+ "log_name": "partner_repack",
+ "repack_manifests_url": "git@github.com:mozilla-partners/repack-manifests.git",
+ "repo_file": "https://raw.githubusercontent.com/mozilla/git-repo/master/repo",
+}
diff --git a/testing/mozharness/configs/platform_supports_post_upload_to_latest.py b/testing/mozharness/configs/platform_supports_post_upload_to_latest.py
new file mode 100644
index 000000000..6ed654ed1
--- /dev/null
+++ b/testing/mozharness/configs/platform_supports_post_upload_to_latest.py
@@ -0,0 +1,3 @@
+config = {
+ 'platform_supports_post_upload_to_latest': False,
+}
diff --git a/testing/mozharness/configs/releases/bouncer_fennec.py b/testing/mozharness/configs/releases/bouncer_fennec.py
new file mode 100644
index 000000000..203c6679c
--- /dev/null
+++ b/testing/mozharness/configs/releases/bouncer_fennec.py
@@ -0,0 +1,22 @@
+# lint_ignore=E501
+config = {
+ "products": {
+ "apk": {
+ "product-name": "Fennec-%(version)s",
+ "check_uptake": True,
+ "alias": "fennec-latest",
+ "ssl-only": False,
+ "add-locales": False, # Do not add locales to let "multi" work
+ "paths": {
+ "android-api-15": {
+ "path": "/mobile/releases/%(version)s/android-api-15/:lang/fennec-%(version)s.:lang.android-arm.apk",
+ "bouncer-platform": "android",
+ },
+ "android-x86": {
+ "path": "/mobile/releases/%(version)s/android-x86/:lang/fennec-%(version)s.:lang.android-i386.apk",
+ "bouncer-platform": "android-x86",
+ },
+ },
+ },
+ },
+}
diff --git a/testing/mozharness/configs/releases/bouncer_firefox_beta.py b/testing/mozharness/configs/releases/bouncer_firefox_beta.py
new file mode 100644
index 000000000..6c563124c
--- /dev/null
+++ b/testing/mozharness/configs/releases/bouncer_firefox_beta.py
@@ -0,0 +1,148 @@
+# lint_ignore=E501
+config = {
+ "shipped-locales-url": "https://hg.mozilla.org/%(repo)s/raw-file/%(revision)s/browser/locales/shipped-locales",
+ "products": {
+ "installer": {
+ "product-name": "Firefox-%(version)s",
+ "check_uptake": True,
+ "alias": "firefox-beta-latest",
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "installer-ssl": {
+ "product-name": "Firefox-%(version)s-SSL",
+ "check_uptake": True,
+ "alias": "firefox-beta-latest-ssl",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "stub-installer": {
+ "product-name": "Firefox-%(version)s-stub",
+ "check_uptake": True,
+ "alias": "firefox-beta-stub",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20Stub%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ },
+ },
+ "sha1-installer": {
+ "product-name": "Firefox-%(version)s-sha1",
+ "check_uptake": True,
+ "alias": "firefox-beta-sha1",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32-sha1/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ },
+ },
+ "complete-mar": {
+ "product-name": "Firefox-%(version)s-Complete",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+ "partials": {
+ "releases-dir": {
+ "product-name": "Firefox-%(version)s-Partial-%(prev_version)s",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+}
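
Two details of the bouncer path templates above are easy to misread: %% is the %-formatting escape for a literal percent sign, so %%20 survives interpolation as the URL-encoded space %20, while :lang is a bouncer-side placeholder that Python formatting deliberately leaves untouched. For example (55.0 is an illustrative version, not a real entry):

    path = "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg"
    print(path % {"version": "55.0"})
    # /firefox/releases/55.0/mac/:lang/Firefox%2055.0.dmg
    # ":lang" remains for bouncer to substitute per locale.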
diff --git a/testing/mozharness/configs/releases/bouncer_firefox_esr.py b/testing/mozharness/configs/releases/bouncer_firefox_esr.py
new file mode 100644
index 000000000..747ff5664
--- /dev/null
+++ b/testing/mozharness/configs/releases/bouncer_firefox_esr.py
@@ -0,0 +1,136 @@
+# lint_ignore=E501
+config = {
+ "shipped-locales-url": "https://hg.mozilla.org/%(repo)s/raw-file/%(revision)s/browser/locales/shipped-locales",
+ "products": {
+ "installer": {
+ "product-name": "Firefox-%(version)s",
+ "check_uptake": True,
+ "alias": "firefox-esr-latest",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "installer-ssl": {
+ "product-name": "Firefox-%(version)s-SSL",
+ "check_uptake": True,
+ "alias": "firefox-esr-latest-ssl",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "sha1-installer": {
+ "product-name": "Firefox-%(version)s-sha1",
+ "check_uptake": True,
+ # XP/Vista Release users are redirected to ESR52
+ "alias": "firefox-sha1",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32-sha1/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ },
+ },
+ "complete-mar": {
+ "product-name": "Firefox-%(version)s-Complete",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+ "partials": {
+ "releases-dir": {
+ "product-name": "Firefox-%(version)s-Partial-%(prev_version)s",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+}
diff --git a/testing/mozharness/configs/releases/bouncer_firefox_release.py b/testing/mozharness/configs/releases/bouncer_firefox_release.py
new file mode 100644
index 000000000..59ecd20a2
--- /dev/null
+++ b/testing/mozharness/configs/releases/bouncer_firefox_release.py
@@ -0,0 +1,191 @@
+# lint_ignore=E501
+config = {
+ "shipped-locales-url": "https://hg.mozilla.org/%(repo)s/raw-file/%(revision)s/browser/locales/shipped-locales",
+ "products": {
+ "installer": {
+ "product-name": "Firefox-%(version)s",
+ "check_uptake": True,
+ "alias": "firefox-latest",
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "installer-ssl": {
+ "product-name": "Firefox-%(version)s-SSL",
+ "check_uptake": True,
+ "alias": "firefox-latest-ssl",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "stub-installer": {
+ "product-name": "Firefox-%(version)s-stub",
+ "check_uptake": True,
+ "alias": "firefox-stub",
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20Stub%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ },
+ },
+ "complete-mar": {
+ "product-name": "Firefox-%(version)s-Complete",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "complete-mar-candidates": {
+ "product-name": "Firefox-%(version)sbuild%(build_number)s-Complete",
+ "check_uptake": False,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/linux-i686/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/linux-x86_64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/mac/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/win32/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/win64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+ "partials": {
+ "releases-dir": {
+ "product-name": "Firefox-%(version)s-Partial-%(prev_version)s",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "candidates-dir": {
+ "product-name": "Firefox-%(version)sbuild%(build_number)s-Partial-%(prev_version)sbuild%(prev_build_number)s",
+ "check_uptake": False,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/linux-i686/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/linux-x86_64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/mac/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/win32/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/candidates/%(version)s-candidates/build%(build_number)s/update/win64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+}
diff --git a/testing/mozharness/configs/releases/bouncer_thunderbird.py b/testing/mozharness/configs/releases/bouncer_thunderbird.py
new file mode 100644
index 000000000..5d0548a59
--- /dev/null
+++ b/testing/mozharness/configs/releases/bouncer_thunderbird.py
@@ -0,0 +1,169 @@
+# lint_ignore=E501
+config = {
+ "shipped-locales-url": "https://hg.mozilla.org/%(repo)s/raw-file/%(revision)s/mail/locales/shipped-locales",
+ "products": {
+ "installer": {
+ "product-name": "Thunderbird-%(version)s",
+ "check_uptake": True,
+ "alias": "thunderbird-latest",
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/thunderbird/releases/%(version)s/linux-i686/:lang/thunderbird-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/thunderbird/releases/%(version)s/linux-x86_64/:lang/thunderbird-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/thunderbird/releases/%(version)s/mac/:lang/Thunderbird%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/thunderbird/releases/%(version)s/win32/:lang/Thunderbird%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "opensolaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.opensolaris-i386.tar.bz2",
+ "bouncer-platform": "opensolaris-i386",
+ },
+ "opensolaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.opensolaris-sparc.tar.bz2",
+ "bouncer-platform": "opensolaris-sparc",
+ },
+ "solaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.solaris-i386.tar.bz2",
+ "bouncer-platform": "solaris-i386",
+ },
+ "solaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.solaris-sparc.tar.bz2",
+ "bouncer-platform": "solaris-sparc",
+ },
+ },
+ },
+ "installer-ssl": {
+ "product-name": "Thunderbird-%(version)s-SSL",
+ "check_uptake": True,
+ "ssl-only": True,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/thunderbird/releases/%(version)s/linux-i686/:lang/thunderbird-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/thunderbird/releases/%(version)s/linux-x86_64/:lang/thunderbird-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/thunderbird/releases/%(version)s/mac/:lang/Thunderbird%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/thunderbird/releases/%(version)s/win32/:lang/Thunderbird%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "opensolaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.opensolaris-i386.tar.bz2",
+ "bouncer-platform": "opensolaris-i386",
+ },
+ "opensolaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.opensolaris-sparc.tar.bz2",
+ "bouncer-platform": "opensolaris-sparc",
+ },
+ "solaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.solaris-i386.tar.bz2",
+ "bouncer-platform": "solaris-i386",
+ },
+ "solaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.solaris-sparc.tar.bz2",
+ "bouncer-platform": "solaris-sparc",
+ },
+ },
+ },
+ "complete-mar": {
+ "product-name": "Thunderbird-%(version)s-Complete",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/thunderbird/releases/%(version)s/update/linux-i686/:lang/thunderbird-%(version)s.complete.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/thunderbird/releases/%(version)s/update/linux-x86_64/:lang/thunderbird-%(version)s.complete.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/thunderbird/releases/%(version)s/update/mac/:lang/thunderbird-%(version)s.complete.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/thunderbird/releases/%(version)s/update/win32/:lang/thunderbird-%(version)s.complete.mar",
+ "bouncer-platform": "win",
+ },
+ "opensolaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.opensolaris-i386.complete.mar",
+ "bouncer-platform": "opensolaris-i386",
+ },
+ "opensolaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.opensolaris-sparc.complete.mar",
+ "bouncer-platform": "opensolaris-sparc",
+ },
+ "solaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.solaris-i386.complete.mar",
+ "bouncer-platform": "solaris-i386",
+ },
+ "solaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(version)s.en-US.solaris-sparc.complete.mar",
+ "bouncer-platform": "solaris-sparc",
+ },
+ },
+ },
+ },
+ "partials": {
+ "releases-dir": {
+ "product-name": "Thunderbird-%(version)s-Partial-%(prev_version)s",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": True,
+ "paths": {
+ "linux": {
+ "path": "/thunderbird/releases/%(version)s/update/linux-i686/:lang/thunderbird-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/thunderbird/releases/%(version)s/update/linux-x86_64/:lang/thunderbird-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/thunderbird/releases/%(version)s/update/mac/:lang/thunderbird-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/thunderbird/releases/%(version)s/update/win32/:lang/thunderbird-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win",
+ },
+ "opensolaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(prev_version)s-%(version)s.en-US.opensolaris-i386.partial.mar",
+ "bouncer-platform": "opensolaris-i386",
+ },
+ "opensolaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(prev_version)s-%(version)s.en-US.opensolaris-sparc.partial.mar",
+ "bouncer-platform": "opensolaris-sparc",
+ },
+ "solaris-i386": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(prev_version)s-%(version)s.en-US.solaris-i386.partial.mar",
+ "bouncer-platform": "solaris-i386",
+ },
+ "solaris-sparc": {
+ "path": "/thunderbird/releases/%(version)s/contrib/solaris_tarball/thunderbird-%(prev_version)s-%(version)s.en-US.solaris-sparc.partial.mar",
+ "bouncer-platform": "solaris-sparc",
+ },
+ },
+ },
+ },
+}
diff --git a/testing/mozharness/configs/releases/dev_bouncer_firefox_beta.py b/testing/mozharness/configs/releases/dev_bouncer_firefox_beta.py
new file mode 100644
index 000000000..29c6e6cfb
--- /dev/null
+++ b/testing/mozharness/configs/releases/dev_bouncer_firefox_beta.py
@@ -0,0 +1,133 @@
+# lint_ignore=E501
+config = {
+ "products": {
+ "installer": {
+ "product-name": "Firefox-%(version)s",
+ "check_uptake": True,
+ "alias": "firefox-beta-latest",
+ "ssl-only": False,
+ "add-locales": False,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "installer-ssl": {
+ "product-name": "Firefox-%(version)s-SSL",
+ "check_uptake": True,
+ "ssl-only": True,
+ "add-locales": False,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/linux-i686/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/linux-x86_64/:lang/firefox-%(version)s.tar.bz2",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/mac/:lang/Firefox%%20%(version)s.dmg",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/win64/:lang/Firefox%%20Setup%%20%(version)s.exe",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ "stub-installer": {
+ "product-name": "Firefox-%(version)s-stub",
+ "check_uptake": True,
+ "alias": "firefox-beta-stub",
+ "ssl-only": True,
+ "add-locales": False,
+ "paths": {
+ "win32": {
+ "path": "/firefox/releases/%(version)s/win32/:lang/Firefox%%20Setup%%20Stub%%20%(version)s.exe",
+ "bouncer-platform": "win",
+ },
+ },
+ },
+ "complete-mar": {
+ "product-name": "Firefox-%(version)s-Complete",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": False,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(version)s.complete.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+ "partials": {
+ "releases-dir": {
+ "product-name": "Firefox-%(version)s-Partial-%(prev_version)s",
+ "check_uptake": True,
+ "ssl-only": False,
+ "add-locales": False,
+ "paths": {
+ "linux": {
+ "path": "/firefox/releases/%(version)s/update/linux-i686/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux",
+ },
+ "linux64": {
+ "path": "/firefox/releases/%(version)s/update/linux-x86_64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "linux64",
+ },
+ "macosx64": {
+ "path": "/firefox/releases/%(version)s/update/mac/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "osx",
+ },
+ "win32": {
+ "path": "/firefox/releases/%(version)s/update/win32/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win",
+ },
+ "win64": {
+ "path": "/firefox/releases/%(version)s/update/win64/:lang/firefox-%(prev_version)s-%(version)s.partial.mar",
+ "bouncer-platform": "win64",
+ },
+ },
+ },
+ },
+}
diff --git a/testing/mozharness/configs/releases/dev_postrelease_firefox_beta.py b/testing/mozharness/configs/releases/dev_postrelease_firefox_beta.py
new file mode 100644
index 000000000..4ecd32349
--- /dev/null
+++ b/testing/mozharness/configs/releases/dev_postrelease_firefox_beta.py
@@ -0,0 +1,20 @@
+config = {
+ # date is used for staging mozilla-beta
+ "log_name": "bump_date",
+ "version_files": [{"file": "browser/config/version_display.txt"}],
+ "repo": {
+ # date is used for staging mozilla-beta
+ "repo": "https://hg.mozilla.org/projects/date",
+ "branch": "default",
+ "dest": "date",
+ "vcs": "hg",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ },
+ # date is used for staging mozilla-beta
+ "push_dest": "ssh://hg.mozilla.org/projects/date",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "ship_it_root": "https://ship-it-dev.allizom.org",
+ "ship_it_username": "ship_it-stage-ffxbld",
+}
diff --git a/testing/mozharness/configs/releases/dev_postrelease_firefox_release.py b/testing/mozharness/configs/releases/dev_postrelease_firefox_release.py
new file mode 100644
index 000000000..0a1497595
--- /dev/null
+++ b/testing/mozharness/configs/releases/dev_postrelease_firefox_release.py
@@ -0,0 +1,22 @@
+config = {
+ "log_name": "bump_release_dev",
+ "version_files": [
+ {"file": "browser/config/version.txt"},
+ {"file": "browser/config/version_display.txt"},
+ {"file": "config/milestone.txt"},
+ ],
+ "repo": {
+ # jamun is used for staging mozilla-release
+ "repo": "https://hg.mozilla.org/projects/jamun",
+ "branch": "default",
+ "dest": "jamun",
+ "vcs": "hg",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ },
+ "push_dest": "ssh://hg.mozilla.org/projects/jamun",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "ship_it_root": "https://ship-it-dev.allizom.org",
+ "ship_it_username": "ship_it-stage-ffxbld",
+}
diff --git a/testing/mozharness/configs/releases/dev_updates_firefox_beta.py b/testing/mozharness/configs/releases/dev_updates_firefox_beta.py
new file mode 100644
index 000000000..40b87c57b
--- /dev/null
+++ b/testing/mozharness/configs/releases/dev_updates_firefox_beta.py
@@ -0,0 +1,39 @@
+
+config = {
+ "log_name": "bump_beta_dev",
+ # TODO: use real repo
+ "repo": {
+ "repo": "https://hg.mozilla.org/users/raliiev_mozilla.com/tools",
+ "branch": "default",
+ "dest": "tools",
+ "vcs": "hg",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ # TODO: use real repo
+ "push_dest": "ssh://hg.mozilla.org/users/raliiev_mozilla.com/tools",
+ # date repo used for staging beta
+ "shipped-locales-url": "https://hg.mozilla.org/projects/date/raw-file/{revision}/browser/locales/shipped-locales",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "archive_domain": "ftp.stage.mozaws.net",
+ "archive_prefix": "https://ftp.stage.mozaws.net/pub",
+ "previous_archive_prefix": "https://archive.mozilla.org/pub",
+ "download_domain": "download.mozilla.org",
+ "balrog_url": "http://ec2-54-241-39-23.us-west-1.compute.amazonaws.com",
+ "balrog_username": "balrog-stage-ffxbld",
+ "update_channels": {
+ "beta-dev": {
+ "version_regex": r"^(\d+\.\d+(b\d+)?)$",
+ "requires_mirrors": True,
+ # TODO - when we use a real repo, rename this file # s/MozDate/MozBeta-dev/
+ "patcher_config": "mozDate-branch-patcher2.cfg",
+ "update_verify_channel": "beta-dev-localtest",
+ "mar_channel_ids": [],
+ "channel_names": ["beta-dev", "beta-dev-localtest", "beta-dev-cdntest"],
+ "rules_to_update": ["firefox-beta-dev-cdntest", "firefox-beta-dev-localtest"],
+ "publish_rules": ["firefox-beta"],
+ }
+ },
+ "balrog_use_dummy_suffix": False,
+}
diff --git a/testing/mozharness/configs/releases/dev_updates_firefox_release.py b/testing/mozharness/configs/releases/dev_updates_firefox_release.py
new file mode 100644
index 000000000..8c2696b5b
--- /dev/null
+++ b/testing/mozharness/configs/releases/dev_updates_firefox_release.py
@@ -0,0 +1,50 @@
+
+config = {
+ "log_name": "updates_release_dev",
+ # TODO: use real repo
+ "repo": {
+ "repo": "https://hg.mozilla.org/users/raliiev_mozilla.com/tools",
+ "branch": "default",
+ "dest": "tools",
+ "vcs": "hg",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ # TODO: use real repo
+ "push_dest": "ssh://hg.mozilla.org/users/raliiev_mozilla.com/tools",
+ # jamun repo used for staging release
+ "shipped-locales-url": "https://hg.mozilla.org/projects/jamun/raw-file/{revision}/browser/locales/shipped-locales",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "archive_domain": "ftp.stage.mozaws.net",
+ "archive_prefix": "https://ftp.stage.mozaws.net/pub",
+ "previous_archive_prefix": "https://archive.mozilla.org/pub",
+ "download_domain": "download.mozilla.org",
+ "balrog_url": "http://ec2-54-241-39-23.us-west-1.compute.amazonaws.com",
+ "balrog_username": "balrog-stage-ffxbld",
+ "update_channels": {
+ "beta-dev": {
+ "version_regex": r"^(\d+\.\d+(b\d+)?)$",
+ "requires_mirrors": False,
+ "patcher_config": "mozDate-branch-patcher2.cfg",
+ "update_verify_channel": "beta-dev-localtest",
+ "mar_channel_ids": [
+ "firefox-mozilla-beta-dev", "firefox-mozilla-release-dev",
+ ],
+ "channel_names": ["beta-dev", "beta-dev-localtest", "beta-dev-cdntest"],
+ "rules_to_update": ["firefox-beta-dev-cdntest", "firefox-beta-dev-localtest"],
+ "publish_rules": ["firefox-beta"],
+ },
+ "release-dev": {
+ "version_regex": r"^\d+\.\d+(\.\d+)?$",
+ "requires_mirrors": True,
+ "patcher_config": "mozJamun-branch-patcher2.cfg",
+ "update_verify_channel": "release-dev-localtest",
+ "mar_channel_ids": [],
+ "channel_names": ["release-dev", "release-dev-localtest", "release-dev-cdntest"],
+ "rules_to_update": ["firefox-release-dev-cdntest", "firefox-release-dev-localtest"],
+ "publish_rules": ["firefox-release"],
+ },
+ },
+ "balrog_use_dummy_suffix": False,
+}
diff --git a/testing/mozharness/configs/releases/postrelease_firefox_beta.py b/testing/mozharness/configs/releases/postrelease_firefox_beta.py
new file mode 100644
index 000000000..b72302d91
--- /dev/null
+++ b/testing/mozharness/configs/releases/postrelease_firefox_beta.py
@@ -0,0 +1,18 @@
+config = {
+ "log_name": "bump_beta",
+ "version_files": [{"file": "browser/config/version_display.txt"}],
+ "repo": {
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": "mozilla-beta",
+ "vcs": "hg",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ "push_dest": "ssh://hg.mozilla.org/releases/mozilla-beta",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "ship_it_root": "https://ship-it.mozilla.org",
+ "ship_it_username": "ship_it-ffxbld",
+}
diff --git a/testing/mozharness/configs/releases/postrelease_firefox_esr52.py b/testing/mozharness/configs/releases/postrelease_firefox_esr52.py
new file mode 100644
index 000000000..ab461c0c8
--- /dev/null
+++ b/testing/mozharness/configs/releases/postrelease_firefox_esr52.py
@@ -0,0 +1,22 @@
+config = {
+ "log_name": "bump_esr52",
+ "version_files": [
+ {"file": "browser/config/version.txt"},
+ {"file": "browser/config/version_display.txt"},
+ {"file": "config/milestone.txt"},
+ ],
+ "repo": {
+ "repo": "https://hg.mozilla.org/releases/mozilla-esr52",
+ "branch": "default",
+ "dest": "mozilla-esr52",
+ "vcs": "hg",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ "push_dest": "ssh://hg.mozilla.org/releases/mozilla-esr52",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "ship_it_root": "https://ship-it.mozilla.org",
+ "ship_it_username": "ship_it-ffxbld",
+}
diff --git a/testing/mozharness/configs/releases/postrelease_firefox_release.py b/testing/mozharness/configs/releases/postrelease_firefox_release.py
new file mode 100644
index 000000000..31a1b2774
--- /dev/null
+++ b/testing/mozharness/configs/releases/postrelease_firefox_release.py
@@ -0,0 +1,22 @@
+config = {
+ "log_name": "bump_release",
+ "version_files": [
+ {"file": "browser/config/version.txt"},
+ {"file": "browser/config/version_display.txt"},
+ {"file": "config/milestone.txt"},
+ ],
+ "repo": {
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": "mozilla-release",
+ "vcs": "hg",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ "push_dest": "ssh://hg.mozilla.org/releases/mozilla-release",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "ship_it_root": "https://ship-it.mozilla.org",
+ "ship_it_username": "ship_it-ffxbld",
+}
diff --git a/testing/mozharness/configs/releases/updates_firefox_beta.py b/testing/mozharness/configs/releases/updates_firefox_beta.py
new file mode 100644
index 000000000..fa81e085f
--- /dev/null
+++ b/testing/mozharness/configs/releases/updates_firefox_beta.py
@@ -0,0 +1,35 @@
+
+config = {
+ "log_name": "updates_beta",
+ "repo": {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ "vcs": "hg",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ "push_dest": "ssh://hg.mozilla.org/build/tools",
+ "shipped-locales-url": "https://hg.mozilla.org/releases/mozilla-beta/raw-file/{revision}/browser/locales/shipped-locales",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "archive_domain": "archive.mozilla.org",
+ "archive_prefix": "https://archive.mozilla.org/pub",
+ "previous_archive_prefix": "https://archive.mozilla.org/pub",
+ "download_domain": "download.mozilla.org",
+ "balrog_url": "https://aus5.mozilla.org",
+ "balrog_username": "balrog-ffxbld",
+ "update_channels": {
+ "beta": {
+ "version_regex": r"^(\d+\.\d+(b\d+)?)$",
+ "requires_mirrors": True,
+ "patcher_config": "mozBeta-branch-patcher2.cfg",
+ "update_verify_channel": "beta-localtest",
+ "mar_channel_ids": [],
+ "channel_names": ["beta", "beta-localtest", "beta-cdntest"],
+ "rules_to_update": ["firefox-beta-cdntest", "firefox-beta-localtest"],
+ "publish_rules": ["firefox-beta"],
+ },
+ },
+ "balrog_use_dummy_suffix": False,
+}
diff --git a/testing/mozharness/configs/releases/updates_firefox_esr52.py b/testing/mozharness/configs/releases/updates_firefox_esr52.py
new file mode 100644
index 000000000..6c5a05cf9
--- /dev/null
+++ b/testing/mozharness/configs/releases/updates_firefox_esr52.py
@@ -0,0 +1,35 @@
+
+config = {
+ "log_name": "updates_esr52",
+ "repo": {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ "vcs": "hg",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ "push_dest": "ssh://hg.mozilla.org/build/tools",
+ "shipped-locales-url": "https://hg.mozilla.org/releases/mozilla-esr52/raw-file/{revision}/browser/locales/shipped-locales",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "archive_domain": "archive.mozilla.org",
+ "archive_prefix": "https://archive.mozilla.org/pub",
+ "previous_archive_prefix": "https://archive.mozilla.org/pub",
+ "download_domain": "download.mozilla.org",
+ "balrog_url": "https://aus5.mozilla.org",
+ "balrog_username": "balrog-ffxbld",
+ "update_channels": {
+ "esr": {
+ "version_regex": r".*",
+ "requires_mirrors": True,
+ "patcher_config": "mozEsr52-branch-patcher2.cfg",
+ "update_verify_channel": "esr-localtest",
+ "mar_channel_ids": [],
+ "channel_names": ["esr", "esr-localtest", "esr-cdntest"],
+ "rules_to_update": ["esr52-cdntest", "esr52-localtest"],
+ "publish_rules": [521],
+ },
+ },
+ "balrog_use_dummy_suffix": False,
+}
diff --git a/testing/mozharness/configs/releases/updates_firefox_release.py b/testing/mozharness/configs/releases/updates_firefox_release.py
new file mode 100644
index 000000000..58210d371
--- /dev/null
+++ b/testing/mozharness/configs/releases/updates_firefox_release.py
@@ -0,0 +1,47 @@
+
+config = {
+ "log_name": "updates_release",
+ "repo": {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ "vcs": "hg",
+ },
+ "vcs_share_base": "/builds/hg-shared",
+ "push_dest": "ssh://hg.mozilla.org/build/tools",
+ "shipped-locales-url": "https://hg.mozilla.org/releases/mozilla-release/raw-file/{revision}/browser/locales/shipped-locales",
+ "ignore_no_changes": True,
+ "ssh_user": "ffxbld",
+ "ssh_key": "~/.ssh/ffxbld_rsa",
+ "archive_domain": "archive.mozilla.org",
+ "archive_prefix": "https://archive.mozilla.org/pub",
+ "previous_archive_prefix": "https://archive.mozilla.org/pub",
+ "download_domain": "download.mozilla.org",
+ "balrog_url": "https://aus5.mozilla.org",
+ "balrog_username": "balrog-ffxbld",
+ "update_channels": {
+ "beta": {
+ "version_regex": r"^(\d+\.\d+(b\d+)?)$",
+ "requires_mirrors": False,
+ "patcher_config": "mozBeta-branch-patcher2.cfg",
+ "update_verify_channel": "beta-localtest",
+ "mar_channel_ids": [
+ "firefox-mozilla-beta", "firefox-mozilla-release",
+ ],
+ "channel_names": ["beta", "beta-localtest", "beta-cdntest"],
+ "rules_to_update": ["firefox-beta-cdntest", "firefox-beta-localtest"],
+ "publish_rules": ["firefox-beta"],
+ },
+ "release": {
+ "version_regex": r"^\d+\.\d+(\.\d+)?$",
+ "requires_mirrors": True,
+ "patcher_config": "mozRelease-branch-patcher2.cfg",
+ "update_verify_channel": "release-localtest",
+ "mar_channel_ids": [],
+ "channel_names": ["release", "release-localtest", "release-cdntest"],
+ "rules_to_update": ["firefox-release-cdntest", "firefox-release-localtest"],
+ "publish_rules": ["firefox-release"],
+ },
+ },
+ "balrog_use_dummy_suffix": False,
+}
diff --git a/testing/mozharness/configs/releng_infra_configs/builders.py b/testing/mozharness/configs/releng_infra_configs/builders.py
new file mode 100644
index 000000000..3a6a8b595
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/builders.py
@@ -0,0 +1,47 @@
+# This config file has generic values needed for any job and any platform running
+# on Release Engineering machines inside the VPN
+from mozharness.base.script import platform_name
+
+# These are values specific to each platform on Release Engineering machines
+PYTHON_WIN32 = 'c:/mozilla-build/python27/python.exe'
+# These are values specific to jobs running on Release Engineering machines;
+# to run this locally on your own machine, append --cfg developer_config.py
+PLATFORM_CONFIG = {
+ 'linux64': {
+ 'exes': {
+ 'gittool.py': '/usr/local/bin/gittool.py',
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ },
+ 'env': {
+ 'DISPLAY': ':2',
+ }
+ },
+ 'macosx': {
+ 'exes': {
+ 'gittool.py': '/usr/local/bin/gittool.py',
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ },
+ },
+ 'win32': {
+ "exes": {
+ 'gittool.py': [PYTHON_WIN32, 'c:/builds/hg-shared/build/tools/buildfarm/utils/gittool.py'],
+ # Otherwise, depending on the PATH, we can pick up python 2.6
+ 'python': PYTHON_WIN32,
+ 'virtualenv': [PYTHON_WIN32, 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ }
+ }
+}
+
+config = PLATFORM_CONFIG[platform_name()]
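+# platform_name() (imported above) maps the current host to one of the
+# PLATFORM_CONFIG keys; running on an unsupported platform raises a KeyError
+# here instead of silently picking a wrong config.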
+# Generic values
+config.update({
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ 'pip_index': False,
+ 'virtualenv_path': 'venv',
+})
+
diff --git a/testing/mozharness/configs/releng_infra_configs/linux.py b/testing/mozharness/configs/releng_infra_configs/linux.py
new file mode 100644
index 000000000..dbac47935
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/linux.py
@@ -0,0 +1,5 @@
+config = {
+ 'env': {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/linux/minidump_stackwalk',
+ }
+}
diff --git a/testing/mozharness/configs/releng_infra_configs/linux64.py b/testing/mozharness/configs/releng_infra_configs/linux64.py
new file mode 100644
index 000000000..d7e97d6e8
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/linux64.py
@@ -0,0 +1,5 @@
+config = {
+ 'env': {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/linux64/minidump_stackwalk',
+ }
+}
diff --git a/testing/mozharness/configs/releng_infra_configs/macosx64.py b/testing/mozharness/configs/releng_infra_configs/macosx64.py
new file mode 100644
index 000000000..c0b5948cc
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/macosx64.py
@@ -0,0 +1,5 @@
+config = {
+ 'env': {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/osx64/minidump_stackwalk',
+ }
+}
diff --git a/testing/mozharness/configs/releng_infra_configs/testers.py b/testing/mozharness/configs/releng_infra_configs/testers.py
new file mode 100644
index 000000000..7f0ce2a7f
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/testers.py
@@ -0,0 +1,67 @@
+# This config file has generic values needed for any job and any platform running
+# on Release Engineering machines inside the VPN
+import os
+
+import mozharness
+
+from mozharness.base.script import platform_name
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
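+# i.e. the external_tools directory shipped alongside the mozharness package,
+# which is where gittool.py is found for the tester platforms below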
+
+# These are values specific to each platform on Release Engineering machines
+PYTHON_WIN32 = 'c:/mozilla-build/python27/python.exe'
+# These are values specific to jobs running on Release Engineering machines;
+# to run this locally on your own machine, append --cfg developer_config.py
+PLATFORM_CONFIG = {
+ 'linux': {
+ 'exes': {
+ 'gittool.py': os.path.join(external_tools_path, 'gittool.py'),
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ },
+ 'env': {
+ 'DISPLAY': ':0',
+ 'PATH': '%(PATH)s:' + external_tools_path,
+ }
+ },
+ 'linux64': {
+ 'exes': {
+ 'gittool.py': os.path.join(external_tools_path, 'gittool.py'),
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ },
+ 'env': {
+ 'DISPLAY': ':0',
+ 'PATH': '%(PATH)s:' + external_tools_path,
+ }
+ },
+ 'macosx': {
+ 'exes': {
+ 'gittool.py': os.path.join(external_tools_path, 'gittool.py'),
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ },
+ 'env': {
+ 'PATH': '%(PATH)s:' + external_tools_path,
+ }
+ },
+ 'win32': {
+ "exes": {
+ 'gittool.py': [PYTHON_WIN32, os.path.join(external_tools_path, 'gittool.py')],
+ # Otherwise, depending on the PATH, we can pick up python 2.6
+ 'python': PYTHON_WIN32,
+ 'virtualenv': [PYTHON_WIN32, 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ }
+ }
+}
+
+config = PLATFORM_CONFIG[platform_name()]
+# Generic values
+config.update({
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ 'pip_index': False,
+ 'virtualenv_path': 'venv',
+})
diff --git a/testing/mozharness/configs/releng_infra_configs/win32.py b/testing/mozharness/configs/releng_infra_configs/win32.py
new file mode 100644
index 000000000..778fa00d9
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/win32.py
@@ -0,0 +1,5 @@
+config = {
+ 'env': {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/win32/minidump_stackwalk',
+ }
+}
diff --git a/testing/mozharness/configs/releng_infra_configs/win64.py b/testing/mozharness/configs/releng_infra_configs/win64.py
new file mode 100644
index 000000000..97968793e
--- /dev/null
+++ b/testing/mozharness/configs/releng_infra_configs/win64.py
@@ -0,0 +1,5 @@
+config = {
+ 'env': {
+ 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/win64/minidump_stackwalk',
+ }
+}
diff --git a/testing/mozharness/configs/remove_executables.py b/testing/mozharness/configs/remove_executables.py
new file mode 100644
index 000000000..dec7a2965
--- /dev/null
+++ b/testing/mozharness/configs/remove_executables.py
@@ -0,0 +1,8 @@
+config = {
+ # minidump_stackwalk and nodejs are baked directly into the tester image
+ # now, so don't download them...
+ "download_minidump_stackwalk": False,
+ "minidump_stackwalk_path": "/usr/local/bin/linux64-minidump_stackwalk",
+ "download_nodejs": False,
+ "nodejs_path": "/usr/local/bin/node",
+ "exes": {}
+}
diff --git a/testing/mozharness/configs/routes.json b/testing/mozharness/configs/routes.json
new file mode 100644
index 000000000..9596f4c97
--- /dev/null
+++ b/testing/mozharness/configs/routes.json
@@ -0,0 +1,18 @@
+{
+ "routes": [
+ "{index}.gecko.v2.{project}.revision.{head_rev}.{build_product}.{build_name}-{build_type}",
+ "{index}.gecko.v2.{project}.pushdate.{year}.{month}.{day}.{pushdate}.{build_product}.{build_name}-{build_type}",
+ "{index}.gecko.v2.{project}.latest.{build_product}.{build_name}-{build_type}"
+ ],
+ "nightly": [
+ "{index}.gecko.v2.{project}.nightly.{year}.{month}.{day}.revision.{head_rev}.{build_product}.{build_name}-{build_type}",
+ "{index}.gecko.v2.{project}.nightly.{year}.{month}.{day}.latest.{build_product}.{build_name}-{build_type}",
+ "{index}.gecko.v2.{project}.nightly.revision.{head_rev}.{build_product}.{build_name}-{build_type}",
+ "{index}.gecko.v2.{project}.nightly.latest.{build_product}.{build_name}-{build_type}"
+ ],
+ "l10n": [
+ "{index}.gecko.v2.{project}.revision.{head_rev}.{build_product}-l10n.{build_name}-{build_type}.{locale}",
+ "{index}.gecko.v2.{project}.pushdate.{year}.{month}.{day}.{pushdate}.{build_product}-l10n.{build_name}-{build_type}.{locale}",
+ "{index}.gecko.v2.{project}.latest.{build_product}-l10n.{build_name}-{build_type}.{locale}"
+ ]
+}
diff --git a/testing/mozharness/configs/selfserve/production.py b/testing/mozharness/configs/selfserve/production.py
new file mode 100644
index 000000000..f28c6c1ff
--- /dev/null
+++ b/testing/mozharness/configs/selfserve/production.py
@@ -0,0 +1,3 @@
+config = {
+ "selfserve_url": "https://secure.pub.build.mozilla.org/buildapi/self-serve",
+}
diff --git a/testing/mozharness/configs/selfserve/staging.py b/testing/mozharness/configs/selfserve/staging.py
new file mode 100644
index 000000000..e0ab70090
--- /dev/null
+++ b/testing/mozharness/configs/selfserve/staging.py
@@ -0,0 +1,3 @@
+config = {
+ "selfserve_url": "https://secure-pub-build.allizom.org/buildapi/self-serve",
+}
diff --git a/testing/mozharness/configs/servo/mac.py b/testing/mozharness/configs/servo/mac.py
new file mode 100644
index 000000000..c97f935bc
--- /dev/null
+++ b/testing/mozharness/configs/servo/mac.py
@@ -0,0 +1,3 @@
+config = {
+ 'concurrency': 6,
+}
diff --git a/testing/mozharness/configs/single_locale/alder.py b/testing/mozharness/configs/single_locale/alder.py
new file mode 100644
index 000000000..e2fc0e6a3
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/alder.py
@@ -0,0 +1,46 @@
+# This configuration uses mozilla-central binaries (en-US, localized complete
+# mars) and URLs, but it generates 'alder' artifacts. With this setup, binaries
+# generated on alder do NOT overwrite mozilla-central files.
+# Using this configuration, on a successful build, artifacts will be uploaded
+# here:
+#
+# * http://dev-stage01.srv.releng.scl3.mozilla.com/pub/mozilla.org/firefox/nightly/latest-alder-l10n/
+# (in staging environment)
+# * https://ftp.mozilla.org/pub/firefox/nightly/latest-alder-l10n/
+# (in production environment)
+#
+# If you really want to have localized alder builds, use the following
+# values:
+# * "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/alder-%(platform)s/latest/",
+# * "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/alder-%(platform)s/latest/",
+# * "repo": "https://hg.mozilla.org/projects/alder",
+#
+
+config = {
+ "nightly_build": True,
+ "branch": "alder",
+ "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/",
+ "update_channel": "nightly",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+
+ # mar
+ "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/mar-tools/%(platform)s",
+
+ # repositories
+ "mozilla_dir": "alder",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "branch": "default",
+ "dest": "alder",
+ }],
+ # purge options
+ 'is_automation': True,
+}
diff --git a/testing/mozharness/configs/single_locale/ash.py b/testing/mozharness/configs/single_locale/ash.py
new file mode 100644
index 000000000..3036d4fba
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/ash.py
@@ -0,0 +1,46 @@
+# This configuration uses mozilla-central binaries (en-US, localized complete
+# mars) and URLs, but it generates 'ash' artifacts. With this setup, binaries
+# generated on ash are NOT overwriting mozilla-central files.
+# Using this configuration, on a successful build, artifacts will be uploaded
+# here:
+#
+# * http://dev-stage01.srv.releng.scl3.mozilla.com/pub/mozilla.org/firefox/nightly/latest-ash-l10n/
+# (in staging environment)
+# * https://ftp.mozilla.org/pub/firefox/nightly/latest-ash-l10n/
+# (in production environment)
+#
+# If you really want to have localized ash builds, use the following
+# values:
+# * "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/ash-%(platform)s/latest/",
+# * "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/ash-%(platform)s/latest/",
+# * "repo": "https://hg.mozilla.org/projects/ash",
+#
+
+config = {
+ "nightly_build": True,
+ "branch": "ash",
+ "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/",
+ "update_channel": "nightly",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+
+ # mar
+ "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/mar-tools/%(platform)s",
+
+ # repositories
+ "mozilla_dir": "ash",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "branch": "default",
+ "dest": "ash",
+ }],
+ # purge options
+ 'is_automation': True,
+}
diff --git a/testing/mozharness/configs/single_locale/ash_android-api-15.py b/testing/mozharness/configs/single_locale/ash_android-api-15.py
new file mode 100644
index 000000000..d3cae75b7
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/ash_android-api-15.py
@@ -0,0 +1,97 @@
+BRANCH = "ash"
+MOZ_UPDATE_CHANNEL = "nightly"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/nightly/latest-%s-android-api-15/en-US" % BRANCH
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "branch": BRANCH,
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
+ "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR,
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US"],
+ "nightly_build": True,
+ 'balrog_credentials_file': 'oauth.txt',
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/projects/ash",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+
+ "l10n_dir": "l10n-central",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_OBJDIR": OBJDIR,
+ "EN_US_BINARY_URL": EN_US_BINARY_URL,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-nightly" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+ "stage_product": "mobile",
+ "platform": "android",
+ "build_type": "api-15-opt",
+
+ # Balrog
+ "build_target": "Android_arm-eabi-gcc3",
+
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/dev-mozilla-beta.py b/testing/mozharness/configs/single_locale/dev-mozilla-beta.py
new file mode 100644
index 000000000..ef96b9b7c
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/dev-mozilla-beta.py
@@ -0,0 +1,37 @@
+config = {
+ "branch": "date",
+ "nightly_build": True,
+ "update_channel": "beta-dev",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+
+ # repositories
+ # staging beta dev releases use date repo for now
+ "mozilla_dir": "date",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/projects/date",
+ "branch": "%(revision)s",
+ "dest": "date",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'is_automation': True,
+ 'purge_minsize': 12,
+ 'default_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "summary",
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/dev-mozilla-release.py b/testing/mozharness/configs/single_locale/dev-mozilla-release.py
new file mode 100644
index 000000000..09048310b
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/dev-mozilla-release.py
@@ -0,0 +1,37 @@
+config = {
+ "branch": "jamun",
+ "nightly_build": True,
+ "update_channel": "release-dev",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+
+ # repositories
+ # staging release uses jamun
+ "mozilla_dir": "jamun",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/projects/jamun",
+ "branch": "%(revision)s",
+ "dest": "jamun",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'purge_minsize': 12,
+ 'is_automation': True,
+ 'default_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "summary",
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/linux.py b/testing/mozharness/configs/single_locale/linux.py
new file mode 100644
index 000000000..3aa2c0349
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/linux.py
@@ -0,0 +1,123 @@
+import os
+
+config = {
+ "platform": "linux",
+ "stage_product": "firefox",
+ "update_platform": "Linux_x86-gcc3",
+ "mozconfig": "%(branch)s/browser/config/mozconfigs/linux32/l10n-mozconfig",
+ "bootstrap_env": {
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "DIST": "%(abs_objdir)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ },
+ "ssh_key_dir": "/home/mock_mozilla/.ssh",
+ "log_name": "single_locale",
+ "objdir": "obj-l10n",
+ "js_src_dir": "js/src",
+ "vcs_share_base": "/builds/hg-shared",
+
+ # tooltool
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/linux32/releng.manifest',
+ # balrog credential file:
+ 'balrog_credentials_file': 'oauth.txt',
+
+ # l10n
+ "ignore_locales": ["en-US", "ja-JP-mac"],
+ "l10n_dir": "l10n",
+ "locales_file": "%(branch)s/browser/locales/all-locales",
+ "locales_dir": "browser/locales",
+ "hg_l10n_tag": "default",
+ "merge_locales": True,
+
+ # MAR
+ "previous_mar_dir": "dist/previous",
+ "current_mar_dir": "dist/current",
+ "update_mar_dir": "dist/update", # sure?
+ "previous_mar_filename": "previous.mar",
+ "current_work_mar_dir": "current.work",
+ "package_base_dir": "dist/l10n-stage",
+ "application_ini": "application.ini",
+ "buildid_section": 'App',
+ "buildid_option": "BuildID",
+ "unpack_script": "tools/update-packaging/unwrap_full_update.pl",
+ "incremental_update_script": "tools/update-packaging/make_incremental_update.sh",
+ "balrog_release_pusher_script": "scripts/updates/balrog-release-pusher.py",
+ "update_packaging_dir": "tools/update-packaging",
+ "local_mar_tool_dir": "dist/host/bin",
+ "mar": "mar",
+ "mbsdiff": "mbsdiff",
+ "current_mar_filename": "firefox-%(version)s.%(locale)s.linux-i686.complete.mar",
+ "complete_mar": "firefox-%(version)s.en-US.linux-i686.complete.mar",
+ "localized_mar": "firefox-%(version)s.%(locale)s.linux-i686.complete.mar",
+ "partial_mar": "firefox-%(version)s.%(locale)s.linux-i686.partial.%(from_buildid)s-%(to_buildid)s.mar",
+ 'installer_file': "firefox-%(version)s.en-US.linux-i686.tar.bz2",
+
+ # Mock
+ 'mock_target': 'mozilla-centos6-x86_64',
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind',
+ ######## 32 bit specific ###########
+ 'glibc-static.i686', 'libstdc++-static.i686',
+ 'gtk2-devel.i686', 'libnotify-devel.i686',
+ 'alsa-lib-devel.i686', 'libcurl-devel.i686',
+ 'wireless-tools-devel.i686', 'libX11-devel.i686',
+ 'libXt-devel.i686', 'mesa-libGL-devel.i686',
+ 'gnome-vfs2-devel.i686', 'GConf2-devel.i686',
+ 'pulseaudio-libs-devel.i686',
+ 'gstreamer-devel.i686', 'gstreamer-plugins-base-devel.i686',
+ # Packages already installed in the mock environment, as x86_64
+ # packages.
+ 'glibc-devel.i686', 'libgcc.i686', 'libstdc++-devel.i686',
+ # yum likes to install .x86_64 -devel packages that satisfy .i686
+ # -devel package dependencies, so manually install the dependencies
+ # of the above packages.
+ 'ORBit2-devel.i686', 'atk-devel.i686', 'cairo-devel.i686',
+ 'check-devel.i686', 'dbus-devel.i686', 'dbus-glib-devel.i686',
+ 'fontconfig-devel.i686', 'glib2-devel.i686',
+ 'hal-devel.i686', 'libICE-devel.i686', 'libIDL-devel.i686',
+ 'libSM-devel.i686', 'libXau-devel.i686', 'libXcomposite-devel.i686',
+ 'libXcursor-devel.i686', 'libXdamage-devel.i686',
+ 'libXdmcp-devel.i686', 'libXext-devel.i686', 'libXfixes-devel.i686',
+ 'libXft-devel.i686', 'libXi-devel.i686', 'libXinerama-devel.i686',
+ 'libXrandr-devel.i686', 'libXrender-devel.i686',
+ 'libXxf86vm-devel.i686', 'libdrm-devel.i686', 'libidn-devel.i686',
+ 'libpng-devel.i686', 'libxcb-devel.i686', 'libxml2-devel.i686',
+ 'pango-devel.i686', 'perl-devel.i686', 'pixman-devel.i686',
+ 'zlib-devel.i686',
+ # Freetype packages need to be installed by version, because a newer
+ # version is available, but we don't want it for Firefox builds.
+ 'freetype-2.3.11-6.el6_1.8.i686',
+ 'freetype-devel-2.3.11-6.el6_1.8.i686',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ ######## 32 bit specific ###########
+ ],
+ 'mock_files': [
+ ('/home/cltbld/.ssh', '/home/mock_mozilla/.ssh'),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/home/cltbld/.boto', '/builds/.boto'),
+ ('/builds/gapi.data', '/builds/gapi.data'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/linux32.py b/testing/mozharness/configs/single_locale/linux32.py
new file mode 120000
index 000000000..e9866bbbf
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/linux32.py
@@ -0,0 +1 @@
+linux.py \ No newline at end of file
diff --git a/testing/mozharness/configs/single_locale/linux64.py b/testing/mozharness/configs/single_locale/linux64.py
new file mode 100644
index 000000000..8a511e56d
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/linux64.py
@@ -0,0 +1,103 @@
+import os
+
+config = {
+ "platform": "linux64",
+ "stage_product": "firefox",
+ "update_platform": "Linux_x86_64-gcc3",
+ "mozconfig": "%(branch)s/browser/config/mozconfigs/linux64/l10n-mozconfig",
+ "bootstrap_env": {
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "DIST": "%(abs_objdir)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ },
+ "ssh_key_dir": "/home/mock_mozilla/.ssh",
+ "log_name": "single_locale",
+ "objdir": "obj-l10n",
+ "js_src_dir": "js/src",
+ "vcs_share_base": "/builds/hg-shared",
+
+ # tooltool
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/linux64/releng.manifest',
+ # balrog credential file:
+ 'balrog_credentials_file': 'oauth.txt',
+
+ # l10n
+ "ignore_locales": ["en-US", "ja-JP-mac"],
+ "l10n_dir": "l10n",
+ "locales_file": "%(branch)s/browser/locales/all-locales",
+ "locales_dir": "browser/locales",
+ "hg_l10n_tag": "default",
+ "merge_locales": True,
+
+ # MAR
+ "previous_mar_dir": "dist/previous",
+ "current_mar_dir": "dist/current",
+ "update_mar_dir": "dist/update", # sure?
+ "previous_mar_filename": "previous.mar",
+ "current_work_mar_dir": "current.work",
+ "package_base_dir": "dist/l10n-stage",
+ "application_ini": "application.ini",
+ "buildid_section": 'App',
+ "buildid_option": "BuildID",
+ "unpack_script": "tools/update-packaging/unwrap_full_update.pl",
+ "incremental_update_script": "tools/update-packaging/make_incremental_update.sh",
+ "balrog_release_pusher_script": "scripts/updates/balrog-release-pusher.py",
+ "update_packaging_dir": "tools/update-packaging",
+ "local_mar_tool_dir": "dist/host/bin",
+ "mar": "mar",
+ "mbsdiff": "mbsdiff",
+ "current_mar_filename": "firefox-%(version)s.%(locale)s.linux-x86_64.complete.mar",
+ "complete_mar": "firefox-%(version)s.en-US.linux-x86_64.complete.mar",
+ "localized_mar": "firefox-%(version)s.%(locale)s.linux-x86_64.complete.mar",
+ "partial_mar": "firefox-%(version)s.%(locale)s.linux-x86_64.partial.%(from_buildid)s-%(to_buildid)s.mar",
+ "installer_file": "firefox-%(version)s.en-US.linux-x86_64.tar.bz2",
+
+ # Mock
+ 'mock_target': 'mozilla-centos6-x86_64',
+
+ 'mock_packages': [
+ 'autoconf213', 'python', 'mozilla-python27', 'zip', 'mozilla-python27-mercurial',
+ 'git', 'ccache', 'perl-Test-Simple', 'perl-Config-General',
+ 'yasm', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ ### <-- from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'valgrind', 'dbus-x11',
+ ######## 64 bit specific ###########
+ 'glibc-static', 'libstdc++-static',
+ 'gtk2-devel', 'libnotify-devel',
+ 'alsa-lib-devel', 'libcurl-devel', 'wireless-tools-devel',
+ 'libX11-devel', 'libXt-devel', 'mesa-libGL-devel', 'gnome-vfs2-devel',
+ 'GConf2-devel',
+ ### from releng repo
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1',
+ 'yasm', 'ccache',
+ ###
+ 'pulseaudio-libs-devel', 'gstreamer-devel',
+ 'gstreamer-plugins-base-devel', 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64'
+ ],
+ 'mock_files': [
+ ('/home/cltbld/.ssh', '/home/mock_mozilla/.ssh'),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/home/cltbld/.boto', '/builds/.boto'),
+ ('/builds/gapi.data', '/builds/gapi.data'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/macosx64.py b/testing/mozharness/configs/single_locale/macosx64.py
new file mode 100644
index 000000000..c2ee47674
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/macosx64.py
@@ -0,0 +1,72 @@
+import os
+
+config = {
+ # mozconfig file to use, it depends on branch and platform names
+ "platform": "macosx64",
+ "stage_product": "firefox",
+ "update_platform": "Darwin_x86_64-gcc3",
+ "mozconfig": "%(branch)s/browser/config/mozconfigs/macosx-universal/l10n-mozconfig",
+ "bootstrap_env": {
+ "SHELL": '/bin/bash',
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "MOZ_PKG_PLATFORM": "mac",
+ # "IS_NIGHTLY": "yes",
+ "DIST": "%(abs_objdir)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ 'TOOLTOOL_CACHE': '/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/builds',
+ },
+ "ssh_key_dir": "~/.ssh",
+ "log_name": "single_locale",
+ "objdir": "obj-l10n",
+ "js_src_dir": "js/src",
+ "vcs_share_base": "/builds/hg-shared",
+
+ "upload_env_extra": {
+ "MOZ_PKG_PLATFORM": "mac",
+ },
+
+ # tooltool
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ 'tooltool_script': ["/builds/tooltool.py"],
+ 'tooltool_bootstrap': "setup.sh",
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/macosx64/releng.manifest',
+ # balrog credential file:
+ 'balrog_credentials_file': 'oauth.txt',
+
+ # l10n
+ "ignore_locales": ["en-US", "ja"],
+ "l10n_dir": "l10n",
+ "locales_file": "%(branch)s/browser/locales/all-locales",
+ "locales_dir": "browser/locales",
+ "hg_l10n_tag": "default",
+ "merge_locales": True,
+
+ # MAR
+ "previous_mar_dir": "dist/previous",
+ "current_mar_dir": "dist/current",
+ "update_mar_dir": "dist/update", # sure?
+ "previous_mar_filename": "previous.mar",
+ "current_work_mar_dir": "current.work",
+ "package_base_dir": "dist/l10n-stage",
+ "application_ini": "Contents/Resources/application.ini",
+ "buildid_section": 'App',
+ "buildid_option": "BuildID",
+ "unpack_script": "tools/update-packaging/unwrap_full_update.pl",
+ "incremental_update_script": "tools/update-packaging/make_incremental_update.sh",
+ "balrog_release_pusher_script": "scripts/updates/balrog-release-pusher.py",
+ "update_packaging_dir": "tools/update-packaging",
+ "local_mar_tool_dir": "dist/host/bin",
+ "mar": "mar",
+ "mbsdiff": "mbsdiff",
+ "current_mar_filename": "firefox-%(version)s.%(locale)s.mac.complete.mar",
+ "complete_mar": "firefox-%(version)s.en-US.mac.complete.mar",
+ "localized_mar": "firefox-%(version)s.%(locale)s.mac.complete.mar",
+ "partial_mar": "firefox-%(version)s.%(locale)s.mac.partial.%(from_buildid)s-%(to_buildid)s.mar",
+ 'installer_file': "firefox-%(version)s.en-US.mac.dmg",
+}
diff --git a/testing/mozharness/configs/single_locale/mozilla-aurora.py b/testing/mozharness/configs/single_locale/mozilla-aurora.py
new file mode 100644
index 000000000..1ce85f726
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-aurora.py
@@ -0,0 +1,29 @@
+config = {
+ "nightly_build": True,
+ "branch": "mozilla-aurora",
+ "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-aurora/",
+ "update_channel": "aurora",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-aurora",
+
+ # mar
+ "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-aurora/mar-tools/%(platform)s",
+
+ # repositories
+ "mozilla_dir": "mozilla-aurora",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/releases/mozilla-aurora",
+ "branch": "default",
+ "dest": "mozilla-aurora",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'is_automation': True,
+}
diff --git a/testing/mozharness/configs/single_locale/mozilla-aurora_android-api-15.py b/testing/mozharness/configs/single_locale/mozilla-aurora_android-api-15.py
new file mode 100644
index 000000000..103922a78
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-aurora_android-api-15.py
@@ -0,0 +1,97 @@
+BRANCH = "mozilla-aurora"
+MOZ_UPDATE_CHANNEL = "aurora"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/nightly/latest-%s-android-api-15/en-US" % BRANCH
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "branch": BRANCH,
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
+ "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR,
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US"],
+ "nightly_build": True,
+ 'balrog_credentials_file': 'oauth.txt',
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-aurora",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH,
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+
+ "l10n_dir": MOZILLA_DIR,
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_OBJDIR": OBJDIR,
+ "EN_US_BINARY_URL": EN_US_BINARY_URL,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-nightly" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+ "stage_product": "mobile",
+ "platform": "android",
+ "build_type": "api-15-opt",
+
+ # Balrog
+ "build_target": "Android_arm-eabi-gcc3",
+
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+}
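A sketch of how a .py config like the one above can be read back: execute the module and pull out its top-level config dict (runpy here is illustrative; it is not necessarily how mozharness itself loads configs):

    import runpy

    # Run the config file as a module and grab its "config" dict.
    cfg = runpy.run_path("mozilla-aurora_android-api-15.py")["config"]
    print(cfg["upload_branch"])  # -> "mozilla-aurora-android-api-15"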
diff --git a/testing/mozharness/configs/single_locale/mozilla-beta.py b/testing/mozharness/configs/single_locale/mozilla-beta.py
new file mode 100644
index 000000000..90ff23027
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-beta.py
@@ -0,0 +1,37 @@
+config = {
+ "nightly_build": True,
+ "branch": "mozilla-beta",
+ "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-beta/",
+ "update_channel": "beta",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-beta",
+
+ # repositories
+ "mozilla_dir": "mozilla-beta",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "revision": "%(revision)s",
+ "dest": "mozilla-beta",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'purge_minsize': 12,
+ 'is_automation': True,
+ 'default_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "summary",
+ ],
+}
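default_actions executes in list order; mozharness-style scripts commonly resolve an action name to a method by swapping hyphens for underscores. A minimal sketch under that assumption (FakeScript and its methods are stand-ins):

    class FakeScript(object):
        # Stand-ins for the real action implementations.
        def clobber(self):
            print("clobbering")

        def clone_locales(self):
            print("cloning locales")

    script = FakeScript()
    for action in ["clobber", "clone-locales"]:
        getattr(script, action.replace("-", "_"))()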
diff --git a/testing/mozharness/configs/single_locale/mozilla-central.py b/testing/mozharness/configs/single_locale/mozilla-central.py
new file mode 100644
index 000000000..c2bf974d6
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-central.py
@@ -0,0 +1,29 @@
+config = {
+ "nightly_build": True,
+ "branch": "mozilla-central",
+ "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/",
+ "update_channel": "nightly",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+
+ # mar
+ "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/mar-tools/%(platform)s",
+
+ # repositories
+ "mozilla_dir": "mozilla-central",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "revision": "%(revision)s",
+ "dest": "mozilla-central",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'is_automation': True,
+}
diff --git a/testing/mozharness/configs/single_locale/mozilla-central_android-api-15.py b/testing/mozharness/configs/single_locale/mozilla-central_android-api-15.py
new file mode 100644
index 000000000..d2b6623c3
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-central_android-api-15.py
@@ -0,0 +1,97 @@
+BRANCH = "mozilla-central"
+MOZ_UPDATE_CHANNEL = "nightly"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/nightly/latest-%s-android-api-15/en-US" % BRANCH
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "branch": BRANCH,
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
+ "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR,
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US"],
+ "nightly_build": True,
+ 'balrog_credentials_file': 'oauth.txt',
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/mozilla-central",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+
+ "l10n_dir": "l10n-central",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_OBJDIR": OBJDIR,
+ "EN_US_BINARY_URL": EN_US_BINARY_URL,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-nightly" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+ "stage_product": "mobile",
+ "platform": "android",
+ "build_type": "api-15-opt",
+
+ # Balrog
+ "build_target": "Android_arm-eabi-gcc3",
+
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/mozilla-esr52.py b/testing/mozharness/configs/single_locale/mozilla-esr52.py
new file mode 100644
index 000000000..0d01f1340
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-esr52.py
@@ -0,0 +1,37 @@
+config = {
+ "nightly_build": True,
+ "branch": "mozilla-esr52",
+ "en_us_binary_url": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-esr52/",
+ "update_channel": "esr",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+
+ # repositories
+ "mozilla_dir": "mozilla-esr52",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/releases/mozilla-esr52",
+ "revision": "%(revision)s",
+ "dest": "mozilla-esr52",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'purge_minsize': 12,
+ 'is_automation': True,
+ 'default_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "summary",
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/mozilla-release.py b/testing/mozharness/configs/single_locale/mozilla-release.py
new file mode 100644
index 000000000..f02ea2ca9
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/mozilla-release.py
@@ -0,0 +1,37 @@
+config = {
+ "nightly_build": True,
+ "branch": "mozilla-release",
+ "en_us_binary_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-release/",
+ "update_channel": "release",
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/mozilla-release",
+
+ # repositories
+ "mozilla_dir": "mozilla-release",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "revision": "%(revision)s",
+ "dest": "mozilla-release",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ }],
+ # purge options
+ 'purge_minsize': 12,
+ 'is_automation': True,
+ 'default_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "summary",
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/production.py b/testing/mozharness/configs/single_locale/production.py
new file mode 100644
index 000000000..fe97fe361
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/production.py
@@ -0,0 +1,14 @@
+config = {
+ "upload_environment": "prod",
+ "upload_env": {
+ "UPLOAD_USER": "ffxbld",
+ # ssh_key_dir is defined per platform: it is "~/.ssh" on every platform
+ # except when mock is in use, in which case ssh_key_dir is
+ # /home/mock_mozilla/.ssh
+ "UPLOAD_SSH_KEY": "%(ssh_key_dir)s/ffxbld_rsa",
+ "UPLOAD_HOST": "upload.ffxbld.productdelivery.prod.mozaws.net",
+ "POST_UPLOAD_CMD": "post_upload.py -b %(branch)s-l10n -p %(stage_product)s -i %(buildid)s --release-to-latest --release-to-dated",
+ "UPLOAD_TO_TEMP": "1"
+ },
+ 'taskcluster_index': 'index',
+}
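UPLOAD_SSH_KEY points back at the per-platform ssh_key_dir through %(ssh_key_dir)s; one substitution, sketched with the default value from these configs:

    platform_cfg = {"ssh_key_dir": "~/.ssh"}  # /home/mock_mozilla/.ssh under mock
    print("%(ssh_key_dir)s/ffxbld_rsa" % platform_cfg)  # -> ~/.ssh/ffxbld_rsa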
diff --git a/testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_15.py b/testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_15.py
new file mode 100644
index 000000000..976f21f44
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_15.py
@@ -0,0 +1,97 @@
+BRANCH = "mozilla-beta"
+MOZ_UPDATE_CHANNEL = "beta"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-15/en-US"
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "stage_product": "mobile",
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
+ "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_dir": "mobile/android/locales",
+ "locales_platform": "android",
+ "ignore_locales": ["en-US"],
+ "balrog_credentials_file": "oauth.txt",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "is_release_or_beta": True,
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-beta",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH,
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+ "l10n_dir": MOZILLA_DIR,
+
+ "release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-beta.py",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_PKG_VERSION": "%(version)s",
+ "MOZ_OBJDIR": OBJDIR,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "base_en_us_binary_url": EN_US_BINARY_URL,
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-15/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-release" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+ "key_alias": "release",
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ('/builds/mozilla-fennec-geoloc-api.key', '/builds/mozilla-fennec-geoloc-api.key'),
+ ('/builds/adjust-sdk-beta.token', '/builds/adjust-sdk-beta.token'),
+ ],
+}
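Note that the candidates URL uses %(buildnum)d, so the substitution dict must supply an integer there; a quick check (the version and buildnum values are hypothetical):

    url = ("http://archive.mozilla.org/pub/mobile/candidates/"
           "%(version)s-candidates/build%(buildnum)d/android-api-15/en-US")
    print(url % {"version": "53.0b1", "buildnum": 1})
    # Passing buildnum as a string would raise TypeError because of %d.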
diff --git a/testing/mozharness/configs/single_locale/release_mozilla-release_android_api_15.py b/testing/mozharness/configs/single_locale/release_mozilla-release_android_api_15.py
new file mode 100644
index 000000000..22d0074bb
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/release_mozilla-release_android_api_15.py
@@ -0,0 +1,97 @@
+BRANCH = "mozilla-release"
+MOZ_UPDATE_CHANNEL = "release"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-15/en-US"
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "stage_product": "mobile",
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
+ "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_dir": "mobile/android/locales",
+ "locales_platform": "android",
+ "ignore_locales": ["en-US"],
+ "balrog_credentials_file": "oauth.txt",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "is_release_or_beta": True,
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/releases/mozilla-release",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/build/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH,
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+ "l10n_dir": MOZILLA_DIR,
+
+ "release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-release.py",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_PKG_VERSION": "%(version)s",
+ "MOZ_OBJDIR": OBJDIR,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "base_en_us_binary_url": EN_US_BINARY_URL,
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-15/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-release" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+ "key_alias": "release",
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ('/builds/mozilla-fennec-geoloc-api.key', '/builds/mozilla-fennec-geoloc-api.key'),
+ ('/builds/adjust-sdk.token', '/builds/adjust-sdk.token'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/staging.py b/testing/mozharness/configs/single_locale/staging.py
new file mode 100644
index 000000000..82caa8dda
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/staging.py
@@ -0,0 +1,17 @@
+config = {
+ "upload_environment": "stage",
+ "upload_env": {
+ "UPLOAD_USER": "ffxbld",
+ # ssh_key_dir is defined per platform: it is "~/.ssh" on every platform
+ # except when mock is in use, in which case ssh_key_dir is
+ # /home/mock_mozilla/.ssh
+ "UPLOAD_SSH_KEY": "%(ssh_key_dir)s/ffxbld_rsa",
+ "UPLOAD_HOST": "upload.ffxbld.productdelivery.stage.mozaws.net",
+ "POST_UPLOAD_CMD": "post_upload.py -b %(branch)s-l10n -p %(stage_product)s -i %(buildid)s --release-to-latest --release-to-dated %(post_upload_extra)s",
+ "UPLOAD_TO_TEMP": "1"
+ },
+ 'taskcluster_index': 'index.garbage.staging',
+ 'post_upload_extra': ['--bucket-prefix', 'net-mozaws-stage-delivery',
+ '--url-prefix', 'http://ftp.stage.mozaws.net/',
+ ],
+}
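post_upload_extra is a list of extra arguments; the %(post_upload_extra)s slot in POST_UPLOAD_CMD is presumably filled with the space-joined list, along these lines (the join is an assumption about the runner, not confirmed here):

    post_upload_extra = ['--bucket-prefix', 'net-mozaws-stage-delivery',
                         '--url-prefix', 'http://ftp.stage.mozaws.net/']
    # Assumption: the runner splices the list into the command as one string.
    print(" ".join(post_upload_extra))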
diff --git a/testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_15.py b/testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_15.py
new file mode 100644
index 000000000..7f7d3e4e2
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_15.py
@@ -0,0 +1,97 @@
+BRANCH = "mozilla-beta"
+MOZ_UPDATE_CHANNEL = "beta"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+STAGE_SERVER = "ftp.stage.mozaws.net"
+EN_US_BINARY_URL = "http://" + STAGE_SERVER + "/pub/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-15/en-US"
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api-pub-build.allizom.org/clobberer/lastclobber",
+ "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-beta.json",
+ "locales_dir": "mobile/android/locales",
+ "locales_platform": "android",
+ "ignore_locales": ["en-US"],
+ "balrog_credentials_file": "oauth.txt",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "is_release_or_beta": True,
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-beta",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s/",
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+ "l10n_dir": MOZILLA_DIR,
+
+ "release_config_file": "buildbot-configs/mozilla/staging_release-fennec-mozilla-beta.py",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_PKG_VERSION": "%(version)s",
+ "MOZ_OBJDIR": OBJDIR,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "base_en_us_binary_url": EN_US_BINARY_URL,
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-15/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-release" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ('/builds/mozilla-fennec-geoloc-api.key', '/builds/mozilla-fennec-geoloc-api.key'),
+ ('/builds/adjust-sdk-beta.token', '/builds/adjust-sdk-beta.token'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_15.py b/testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_15.py
new file mode 100644
index 000000000..da4803a60
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_15.py
@@ -0,0 +1,97 @@
+BRANCH = "mozilla-release"
+MOZ_UPDATE_CHANNEL = "release"
+MOZILLA_DIR = BRANCH
+OBJDIR = "obj-l10n"
+STAGE_SERVER = "dev-stage01.srv.releng.scl3.mozilla.com"
+EN_US_BINARY_URL = "http://" + STAGE_SERVER + "/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-15/en-US"
+HG_SHARE_BASE_DIR = "/builds/hg-shared"
+
+config = {
+ "log_name": "single_locale",
+ "objdir": OBJDIR,
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api-pub-build.allizom.org/clobberer/lastclobber",
+ "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-release.json",
+ "locales_dir": "mobile/android/locales",
+ "locales_platform": "android",
+ "ignore_locales": ["en-US"],
+ "balrog_credentials_file": "oauth.txt",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "is_release_or_beta": True,
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "repos": [{
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-release",
+ "branch": "default",
+ "dest": MOZILLA_DIR,
+ }, {
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
+ "branch": "default",
+ "dest": "buildbot-configs"
+ }, {
+ "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s/",
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': HG_SHARE_BASE_DIR,
+ "l10n_dir": MOZILLA_DIR,
+
+ "release_config_file": "buildbot-configs/mozilla/staging_release-fennec-mozilla-release.py",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_PKG_VERSION": "%(version)s",
+ "MOZ_OBJDIR": OBJDIR,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
+ },
+ "base_en_us_binary_url": EN_US_BINARY_URL,
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-15/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-release" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ('/builds/mozilla-fennec-geoloc-api.key', '/builds/mozilla-fennec-geoloc-api.key'),
+ ('/builds/adjust-sdk.token', '/builds/adjust-sdk.token'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/tc_android-api-15.py b/testing/mozharness/configs/single_locale/tc_android-api-15.py
new file mode 100644
index 000000000..f15b254dc
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/tc_android-api-15.py
@@ -0,0 +1,16 @@
+config = {
+ "locales_file": "src/mobile/android/locales/all-locales",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "mozconfig": "src/mobile/android/config/mozconfigs/android-api-15/l10n-nightly",
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/src",
+ },
+ "tooltool_servers": ['http://relengapi/tooltool/'],
+
+ "upload_env": {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': '/home/worker/artifacts/',
+ },
+ "mozilla_dir": "src/",
+}
diff --git a/testing/mozharness/configs/single_locale/tc_linux32.py b/testing/mozharness/configs/single_locale/tc_linux32.py
new file mode 100644
index 000000000..3045138f8
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/tc_linux32.py
@@ -0,0 +1,23 @@
+import os
+
+config = {
+ "locales_file": "src/browser/locales/all-locales",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "mozconfig": "src/browser/config/mozconfigs/linux32/l10n-mozconfig",
+ "bootstrap_env": {
+ "NO_MERCURIAL_SETUP_CHECK": "1",
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "DIST": "%(abs_objdir)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ 'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
+ },
+ "upload_env": {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': '/home/worker/artifacts/',
+ },
+ "mozilla_dir": "src/",
+}
diff --git a/testing/mozharness/configs/single_locale/tc_linux64.py b/testing/mozharness/configs/single_locale/tc_linux64.py
new file mode 100644
index 000000000..28a4c6f56
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/tc_linux64.py
@@ -0,0 +1,23 @@
+import os
+
+config = {
+ "locales_file": "src/browser/locales/all-locales",
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "mozconfig": "src/browser/config/mozconfigs/linux64/l10n-mozconfig",
+ "bootstrap_env": {
+ "NO_MERCURIAL_SETUP_CHECK": "1",
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "DIST": "%(abs_objdir)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ 'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
+ },
+ "upload_env": {
+ 'UPLOAD_HOST': 'localhost',
+ 'UPLOAD_PATH': '/home/worker/artifacts/',
+ },
+ "mozilla_dir": "src/",
+}
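The tc_* files are thin overlays meant to be combined with a branch config; when several config files are loaded, later dicts shallowly override earlier ones, roughly:

    # Values below are taken from these configs; the merge loop is illustrative.
    branch_cfg = {"update_channel": "nightly", "mozilla_dir": "mozilla-central"}
    tc_cfg = {"mozilla_dir": "src/"}
    merged = {}
    for cfg in (branch_cfg, tc_cfg):  # later configs win key-by-key
        merged.update(cfg)
    print(merged["mozilla_dir"])  # -> "src/"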
diff --git a/testing/mozharness/configs/single_locale/try.py b/testing/mozharness/configs/single_locale/try.py
new file mode 100644
index 000000000..369159111
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/try.py
@@ -0,0 +1,42 @@
+config = {
+ "nightly_build": False,
+ "branch": "try",
+ "en_us_binary_url": "http://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central",
+ "update_channel": "nightly",
+ "update_gecko_source_to_enUS": False,
+
+ # l10n
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+
+ # mar
+ "mar_tools_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/latest-mozilla-central/mar-tools/%(platform)s",
+
+ # repositories
+ "mozilla_dir": "try",
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/try",
+ "revision": "%(revision)s",
+ "dest": "try",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ "clone_by_revision": True,
+ "clone_with_purge": True,
+ }],
+ # purge options
+ 'is_automation': True,
+ "upload_env": {
+ "UPLOAD_USER": "trybld",
+ # ssh_key_dir is defined per platform: it is "~/.ssh" on every platform
+ # except when mock is in use, in which case ssh_key_dir is
+ # /home/mock_mozilla/.ssh
+ "UPLOAD_SSH_KEY": "%(ssh_key_dir)s/trybld_dsa",
+ "UPLOAD_HOST": "upload.trybld.productdelivery.%(upload_environment)s.mozaws.net",
+ "POST_UPLOAD_CMD": "post_upload.py --who %(who)s --builddir %(branch)s-%(platform)s --tinderbox-builds-dir %(who)s-%(revision)s -p %(stage_product)s -i %(buildid)s --revision %(revision)s --release-to-try-builds %(post_upload_extra)s",
+ "UPLOAD_TO_TEMP": "1"
+ },
+}
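Every placeholder in the try POST_UPLOAD_CMD must be present in the substitution dict at once; a sketch with hypothetical values:

    cmd = ("post_upload.py --who %(who)s --builddir %(branch)s-%(platform)s "
           "--tinderbox-builds-dir %(who)s-%(revision)s -p %(stage_product)s "
           "-i %(buildid)s --revision %(revision)s --release-to-try-builds "
           "%(post_upload_extra)s")
    print(cmd % {
        "who": "dev@example.com", "branch": "try", "platform": "linux64",
        "revision": "abcdef123456", "stage_product": "firefox",
        "buildid": "20170301030203", "post_upload_extra": "",
    })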
diff --git a/testing/mozharness/configs/single_locale/try_android-api-15.py b/testing/mozharness/configs/single_locale/try_android-api-15.py
new file mode 100644
index 000000000..74d397b65
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/try_android-api-15.py
@@ -0,0 +1,97 @@
+BRANCH = "try"
+MOZILLA_DIR = BRANCH
+EN_US_BINARY_URL = "http://archive.mozilla.org/pub/" \
+ "mobile/nightly/latest-mozilla-central-android-api-15/en-US"
+
+config = {
+ "branch": "try",
+ "log_name": "single_locale",
+ "objdir": "obj-l10n",
+ "is_automation": True,
+ "buildbot_json_path": "buildprops.json",
+ "force_clobber": True,
+ "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
+ "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR,
+ "locales_dir": "mobile/android/locales",
+ "ignore_locales": ["en-US"],
+ "nightly_build": False,
+ 'balrog_credentials_file': 'oauth.txt',
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ "tooltool_config": {
+ "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
+ "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
+ },
+ "exes": {
+ 'tooltool.py': '/builds/tooltool.py',
+ },
+ "update_gecko_source_to_enUS": False,
+ "repos": [{
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools",
+ }, {
+ "vcs": "hg",
+ "repo": "https://hg.mozilla.org/try",
+ "revision": "%(revision)s",
+ "dest": "try",
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ "clone_by_revision": True,
+ "clone_with_purge": True,
+ }],
+ "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
+ "hg_l10n_tag": "default",
+ 'vcs_share_base': "/builds/hg-shared",
+
+ "l10n_dir": "l10n-central",
+ "repack_env": {
+ # so ugly, bug 951238
+ "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": EN_US_BINARY_URL,
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
+ "MOZ_UPDATE_CHANNEL": "try", # XXX Invalid
+ },
+ "upload_branch": "%s-android-api-15" % BRANCH,
+ "ssh_key_dir": "~/.ssh",
+ "merge_locales": True,
+ "mozilla_dir": MOZILLA_DIR,
+ "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-15/l10n-nightly" % MOZILLA_DIR,
+ "signature_verification_script": "tools/release/signing/verify-android-signature.sh",
+ "stage_product": "mobile",
+ "platform": "android", # XXX Validate
+ "build_type": "api-15-opt", # XXX Validate
+
+ # Balrog
+ "build_target": "Android_arm-eabi-gcc3",
+
+ # Mock
+ "mock_target": "mozilla-centos6-x86_64-android",
+ "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
+ 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
+ 'gtk2-devel', 'libnotify-devel', 'yasm',
+ 'alsa-lib-devel', 'libcurl-devel',
+ 'wireless-tools-devel', 'libX11-devel',
+ 'libXt-devel', 'mesa-libGL-devel',
+ 'gnome-vfs2-devel', 'GConf2-devel', 'wget',
+ 'mpfr', # required for system compiler
+ 'xorg-x11-font*', # fonts required for PGO
+ 'imake', # required for makedepend!?!
+ 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
+ 'valgrind', 'dbus-x11',
+ 'pulseaudio-libs-devel',
+ 'gstreamer-devel', 'gstreamer-plugins-base-devel',
+ 'freetype-2.3.11-6.el6_1.8.x86_64',
+ 'freetype-devel-2.3.11-6.el6_1.8.x86_64',
+ 'java-1.7.0-openjdk-devel',
+ 'openssh-clients',
+ 'zlib-devel-1.2.3-27.el6.i686',
+ ],
+ "mock_files": [
+ ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
+ ('/home/cltbld/.hgrc', '/builds/.hgrc'),
+ ('/builds/relengapi.tok', '/builds/relengapi.tok'),
+ ('/tools/tooltool.py', '/builds/tooltool.py'),
+ ('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
+ ],
+}
diff --git a/testing/mozharness/configs/single_locale/win32.py b/testing/mozharness/configs/single_locale/win32.py
new file mode 100644
index 000000000..ea07fff86
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/win32.py
@@ -0,0 +1,77 @@
+import os
+import sys
+
+config = {
+ "platform": "win32",
+ "stage_product": "firefox",
+ "update_platform": "WINNT_x86-msvc",
+ "mozconfig": "%(branch)s/browser/config/mozconfigs/win32/l10n-mozconfig",
+ "bootstrap_env": {
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "DIST": "%(abs_objdir)s",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ "PATH": 'C:\\mozilla-build\\nsis-3.01;'
+ '%s' % (os.environ.get('path')),
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ "ssh_key_dir": "~/.ssh",
+ "log_name": "single_locale",
+ "objdir": "obj-l10n",
+ "js_src_dir": "js/src",
+ "vcs_share_base": "c:/builds/hg-shared",
+
+ # tooltool
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/win32/releng.manifest',
+ # balrog credential file:
+ 'balrog_credentials_file': 'oauth.txt',
+
+ # l10n
+ "ignore_locales": ["en-US", "ja-JP-mac"],
+ "l10n_dir": "l10n",
+ "locales_file": "%(branch)s/browser/locales/all-locales",
+ "locales_dir": "browser/locales",
+ "hg_l10n_tag": "default",
+ "merge_locales": True,
+
+ # MAR
+ "previous_mar_dir": "dist\\previous",
+ "current_mar_dir": "dist\\current",
+ "update_mar_dir": "dist\\update", # sure?
+ "previous_mar_filename": "previous.mar",
+ "current_work_mar_dir": "current.work",
+ "package_base_dir": "dist\\l10n-stage",
+ "application_ini": "application.ini",
+ "buildid_section": 'App',
+ "buildid_option": "BuildID",
+ "unpack_script": "tools\\update-packaging\\unwrap_full_update.pl",
+ "incremental_update_script": "tools\\update-packaging\\make_incremental_update.sh",
+ "balrog_release_pusher_script": "scripts\\updates\\balrog-release-pusher.py",
+ "update_packaging_dir": "tools\\update-packaging",
+ "local_mar_tool_dir": "dist\\host\\bin",
+ "mar": "mar.exe",
+ "mbsdiff": "mbsdiff.exe",
+ "current_mar_filename": "firefox-%(version)s.%(locale)s.win32.complete.mar",
+ "complete_mar": "firefox-%(version)s.en-US.win32.complete.mar",
+ "localized_mar": "firefox-%(version)s.%(locale)s.win32.complete.mar",
+ "partial_mar": "firefox-%(version)s.%(locale)s.win32.partial.%(from_buildid)s-%(to_buildid)s.mar",
+ 'installer_file': "firefox-%(version)s.en-US.win32.installer.exe",
+
+ # use mozmake?
+ "enable_mozmake": True,
+ 'exes': {
+ 'python2.7': sys.executable,
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ }
+}
diff --git a/testing/mozharness/configs/single_locale/win64.py b/testing/mozharness/configs/single_locale/win64.py
new file mode 100644
index 000000000..df553018f
--- /dev/null
+++ b/testing/mozharness/configs/single_locale/win64.py
@@ -0,0 +1,77 @@
+import os
+import sys
+
+config = {
+ "platform": "win64",
+ "stage_product": "firefox",
+ "update_platform": "WINNT_x86_64-msvc",
+ "mozconfig": "%(branch)s/browser/config/mozconfigs/win64/l10n-mozconfig",
+ "bootstrap_env": {
+ "MOZ_OBJDIR": "obj-l10n",
+ "EN_US_BINARY_URL": "%(en_us_binary_url)s",
+ "MOZ_UPDATE_CHANNEL": "%(update_channel)s",
+ "DIST": "%(abs_objdir)s",
+ "LOCALE_MERGEDIR": "%(abs_merge_dir)s",
+ "L10NBASEDIR": "../../l10n",
+ "MOZ_MAKE_COMPLETE_MAR": "1",
+ "PATH": 'C:\\mozilla-build\\nsis-3.01;'
+ '%s' % (os.environ.get('path')),
+ 'TOOLTOOL_CACHE': '/c/builds/tooltool_cache',
+ 'TOOLTOOL_HOME': '/c/builds',
+ },
+ "ssh_key_dir": "~/.ssh",
+ "log_name": "single_locale",
+ "objdir": "obj-l10n",
+ "js_src_dir": "js/src",
+ "vcs_share_base": "c:/builds/hg-shared",
+
+ # tooltool
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ 'tooltool_script': [sys.executable,
+ 'C:/mozilla-build/tooltool.py'],
+ 'tooltool_bootstrap': "setup.sh",
+ 'tooltool_manifest_src': 'browser/config/tooltool-manifests/win64/releng.manifest',
+ # balrog credential file:
+ 'balrog_credentials_file': 'oauth.txt',
+
+ # l10n
+ "ignore_locales": ["en-US", "ja-JP-mac"],
+ "l10n_dir": "l10n",
+ "locales_file": "%(branch)s/browser/locales/all-locales",
+ "locales_dir": "browser/locales",
+ "hg_l10n_tag": "default",
+ "merge_locales": True,
+
+ # MAR
+ "previous_mar_dir": "dist\\previous",
+ "current_mar_dir": "dist\\current",
+ "update_mar_dir": "dist\\update", # sure?
+ "previous_mar_filename": "previous.mar",
+ "current_work_mar_dir": "current.work",
+ "package_base_dir": "dist\\l10n-stage",
+ "application_ini": "application.ini",
+ "buildid_section": 'App',
+ "buildid_option": "BuildID",
+ "unpack_script": "tools\\update-packaging\\unwrap_full_update.pl",
+ "incremental_update_script": "tools\\update-packaging\\make_incremental_update.sh",
+ "balrog_release_pusher_script": "scripts\\updates\\balrog-release-pusher.py",
+ "update_packaging_dir": "tools\\update-packaging",
+ "local_mar_tool_dir": "dist\\host\\bin",
+ "mar": "mar.exe",
+ "mbsdiff": "mbsdiff.exe",
+ "current_mar_filename": "firefox-%(version)s.%(locale)s.win64.complete.mar",
+ "complete_mar": "firefox-%(version)s.en-US.win64.complete.mar",
+ "localized_mar": "firefox-%(version)s.%(locale)s.win64.complete.mar",
+ "partial_mar": "firefox-%(version)s.%(locale)s.win64.partial.%(from_buildid)s-%(to_buildid)s.mar",
+ 'installer_file': "firefox-%(version)s.en-US.win64.installer.exe",
+
+ # use mozmake?
+ "enable_mozmake": True,
+ 'exes': {
+ 'python2.7': sys.executable,
+ 'virtualenv': [
+ sys.executable,
+ 'c:/mozilla-build/buildbotve/virtualenv.py'
+ ],
+ }
+}
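The MAR names are per-locale templates; filling one in (version, locale, and buildids hypothetical):

    partial_mar = ("firefox-%(version)s.%(locale)s.win64.partial."
                   "%(from_buildid)s-%(to_buildid)s.mar")
    print(partial_mar % {"version": "53.0", "locale": "de",
                         "from_buildid": "20170301030203",
                         "to_buildid": "20170302030203"})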
diff --git a/testing/mozharness/configs/talos/linux_config.py b/testing/mozharness/configs/talos/linux_config.py
new file mode 100644
index 000000000..192de17c6
--- /dev/null
+++ b/testing/mozharness/configs/talos/linux_config.py
@@ -0,0 +1,46 @@
+import os
+import platform
+
+PYTHON = '/tools/buildbot/bin/python'
+VENV_PATH = '%s/build/venv' % os.getcwd()
+if platform.architecture()[0] == '64bit':
+ TOOLTOOL_MANIFEST_PATH = "config/tooltool-manifests/linux64/releng.manifest"
+ MINIDUMP_STACKWALK_PATH = "linux64-minidump_stackwalk"
+else:
+ TOOLTOOL_MANIFEST_PATH = "config/tooltool-manifests/linux32/releng.manifest"
+ MINIDUMP_STACKWALK_PATH = "linux32-minidump_stackwalk"
+
+config = {
+ "log_name": "talos",
+ "buildbot_json_path": "buildprops.json",
+ "installer_path": "installer.exe",
+ "virtualenv_path": VENV_PATH,
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ "exes": {
+ 'python': PYTHON,
+ 'virtualenv': [PYTHON, '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+ "title": os.uname()[1].lower().split('.')[0],
+ "default_actions": [
+ "clobber",
+ "read-buildbot-config",
+ "download-and-extract",
+ "populate-webroot",
+ "create-virtualenv",
+ "install",
+ "run-tests",
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": MINIDUMP_STACKWALK_PATH,
+ "minidump_tooltool_manifest_path": TOOLTOOL_MANIFEST_PATH,
+ "tooltool_cache": "/builds/tooltool_cache",
+}
diff --git a/testing/mozharness/configs/talos/mac_config.py b/testing/mozharness/configs/talos/mac_config.py
new file mode 100644
index 000000000..56876dbdd
--- /dev/null
+++ b/testing/mozharness/configs/talos/mac_config.py
@@ -0,0 +1,56 @@
+import os
+
+ENABLE_SCREEN_RESOLUTION_CHECK = True
+
+SCREEN_RESOLUTION_CHECK = {
+ "name": "check_screen_resolution",
+ "cmd": ["bash", "-c", "screenresolution get && screenresolution list && system_profiler SPDisplaysDataType"],
+ "architectures": ["32bit", "64bit"],
+ "halt_on_failure": False,
+ "enabled": ENABLE_SCREEN_RESOLUTION_CHECK
+}
+
+PYTHON = '/tools/buildbot/bin/python'
+VENV_PATH = '%s/build/venv' % os.getcwd()
+
+config = {
+ "log_name": "talos",
+ "buildbot_json_path": "buildprops.json",
+ "installer_path": "installer.exe",
+ "virtualenv_path": VENV_PATH,
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ "exes": {
+ 'python': PYTHON,
+ 'virtualenv': [PYTHON, '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+ "title": os.uname()[1].lower().split('.')[0],
+ "default_actions": [
+ "clobber",
+ "read-buildbot-config",
+ "download-and-extract",
+ "populate-webroot",
+ "create-virtualenv",
+ "install",
+ "run-tests",
+ ],
+ "run_cmd_checks_enabled": True,
+ "preflight_run_cmd_suites": [
+ SCREEN_RESOLUTION_CHECK,
+ ],
+ "postflight_run_cmd_suites": [
+ SCREEN_RESOLUTION_CHECK,
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": "macosx64-minidump_stackwalk",
+ "minidump_tooltool_manifest_path": "config/tooltool-manifests/macosx64/releng.manifest",
+ "tooltool_cache": "/builds/tooltool_cache",
+}
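A rough sketch of how a preflight_run_cmd_suites entry such as SCREEN_RESOLUTION_CHECK could be consumed; the loop below is illustrative, not mozharness's actual runner:

    import subprocess

    def run_check(check):
        # Skip disabled checks; run the command; only halt when asked to.
        if not check["enabled"]:
            return
        rc = subprocess.call(check["cmd"])
        if rc != 0 and check["halt_on_failure"]:
            raise SystemExit(rc)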
diff --git a/testing/mozharness/configs/talos/windows_config.py b/testing/mozharness/configs/talos/windows_config.py
new file mode 100644
index 000000000..50c924c44
--- /dev/null
+++ b/testing/mozharness/configs/talos/windows_config.py
@@ -0,0 +1,48 @@
+import os
+import socket
+
+PYTHON = 'c:/mozilla-build/python27/python.exe'
+PYTHON_DLL = 'c:/mozilla-build/python27/python27.dll'
+VENV_PATH = os.path.join(os.getcwd(), 'build/venv')
+
+config = {
+ "log_name": "talos",
+ "buildbot_json_path": "buildprops.json",
+ "installer_path": "installer.exe",
+ "virtualenv_path": VENV_PATH,
+ "virtualenv_python_dll": PYTHON_DLL,
+ "pip_index": False,
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "virtualenv_modules": ['pywin32', 'talos', 'mozinstall'],
+ "exes": {
+ 'python': PYTHON,
+ 'virtualenv': [PYTHON, 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ 'easy_install': ['%s/scripts/python' % VENV_PATH,
+ '%s/scripts/easy_install-2.7-script.py' % VENV_PATH],
+ 'mozinstall': ['%s/scripts/python' % VENV_PATH,
+ '%s/scripts/mozinstall-script.py' % VENV_PATH],
+ 'hg': 'c:/mozilla-build/hg/hg',
+ 'tooltool.py': [PYTHON, 'C:/mozilla-build/tooltool.py'],
+ },
+ "title": socket.gethostname().split('.')[0],
+ "default_actions": [
+ "clobber",
+ "read-buildbot-config",
+ "download-and-extract",
+ "populate-webroot",
+ "create-virtualenv",
+ "install",
+ "run-tests",
+ ],
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+ "metro_harness_path_frmt": "%(metro_base_path)s/metro/metrotestharness.exe",
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": "win32-minidump_stackwalk.exe",
+ "minidump_tooltool_manifest_path": "config/tooltool-manifests/win32/releng.manifest",
+}
diff --git a/testing/mozharness/configs/taskcluster_nightly.py b/testing/mozharness/configs/taskcluster_nightly.py
new file mode 100644
index 000000000..6c4e4a754
--- /dev/null
+++ b/testing/mozharness/configs/taskcluster_nightly.py
@@ -0,0 +1,4 @@
+config = {
+ 'nightly_build': True,
+ 'taskcluster_nightly': True,
+}
diff --git a/testing/mozharness/configs/test/example_config1.json b/testing/mozharness/configs/test/example_config1.json
new file mode 100644
index 000000000..ca73466ba
--- /dev/null
+++ b/testing/mozharness/configs/test/example_config1.json
@@ -0,0 +1,5 @@
+{
+ "beverage": "fizzy drink",
+ "long_sleep_time": 1800,
+ "random_config_key1": "spectacular"
+}
diff --git a/testing/mozharness/configs/test/example_config2.py b/testing/mozharness/configs/test/example_config2.py
new file mode 100644
index 000000000..958543b60
--- /dev/null
+++ b/testing/mozharness/configs/test/example_config2.py
@@ -0,0 +1,5 @@
+config = {
+ "beverage": "cider",
+ "long_sleep_time": 300,
+ "random_config_key2": "wunderbar",
+}
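These two example configs exercise multi-config loading; combining them with a shallow update (the json/runpy usage is illustrative) shows the later file winning on shared keys while unique keys survive:

    import json
    import runpy

    with open("example_config1.json") as f:
        cfg = json.load(f)
    cfg.update(runpy.run_path("example_config2.py")["config"])
    print(cfg["beverage"])            # -> "cider" (example_config2.py wins)
    print(cfg["random_config_key1"])  # -> "spectacular" (kept from the JSON)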
diff --git a/testing/mozharness/configs/test/test.illegal_suffix b/testing/mozharness/configs/test/test.illegal_suffix
new file mode 100644
index 000000000..7d9a4d96d
--- /dev/null
+++ b/testing/mozharness/configs/test/test.illegal_suffix
@@ -0,0 +1,20 @@
+{
+ "log_name": "test",
+ "log_dir": "test_logs",
+ "log_to_console": false,
+ "key1": "value1",
+ "key2": "value2",
+ "section1": {
+
+ "subsection1": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ "subsection2": {
+ "key1": "value1",
+ "key2": "value2"
+ }
+
+ }
+}
diff --git a/testing/mozharness/configs/test/test.json b/testing/mozharness/configs/test/test.json
new file mode 100644
index 000000000..7d9a4d96d
--- /dev/null
+++ b/testing/mozharness/configs/test/test.json
@@ -0,0 +1,20 @@
+{
+ "log_name": "test",
+ "log_dir": "test_logs",
+ "log_to_console": false,
+ "key1": "value1",
+ "key2": "value2",
+ "section1": {
+
+ "subsection1": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ "subsection2": {
+ "key1": "value1",
+ "key2": "value2"
+ }
+
+ }
+}
diff --git a/testing/mozharness/configs/test/test.py b/testing/mozharness/configs/test/test.py
new file mode 100644
index 000000000..84fc357b2
--- /dev/null
+++ b/testing/mozharness/configs/test/test.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+config = {
+ "log_name": "test",
+ "log_dir": "test_logs",
+ "log_to_console": False,
+ "key1": "value1",
+ "key2": "value2",
+ "section1": {
+
+ "subsection1": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ "subsection2": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ },
+ "opt_override": "some stuff",
+}
diff --git a/testing/mozharness/configs/test/test_malformed.json b/testing/mozharness/configs/test/test_malformed.json
new file mode 100644
index 000000000..260be45b8
--- /dev/null
+++ b/testing/mozharness/configs/test/test_malformed.json
@@ -0,0 +1,20 @@
+{
+ "log_name": "test",
+ "log_dir": "test_logs",
+ "log_to_console": false,
+ "key1": "value1",
+ "key2": "value2",
+ "section1": {
+
+ "subsection1": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ "subsection2": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ }
+}
diff --git a/testing/mozharness/configs/test/test_malformed.py b/testing/mozharness/configs/test/test_malformed.py
new file mode 100644
index 000000000..e7ccefd15
--- /dev/null
+++ b/testing/mozharness/configs/test/test_malformed.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+config = {
+ "log_name": "test",
+ "log_dir": "test_logs",
+ "log_to_console": False,
+ "key1": "value1",
+ "key2": "value2",
+ "section1": {
+
+ "subsection1": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+a;sldkfjas;dfkljasdf;kjasdf;ljkadsflkjsdfkweoi
+ "subsection2": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+
+ },
+}
diff --git a/testing/mozharness/configs/test/test_optional.py b/testing/mozharness/configs/test/test_optional.py
new file mode 100644
index 000000000..4eb13b3df
--- /dev/null
+++ b/testing/mozharness/configs/test/test_optional.py
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
+config = {
+ "opt_override": "new stuff",
+}
diff --git a/testing/mozharness/configs/test/test_override.py b/testing/mozharness/configs/test/test_override.py
new file mode 100644
index 000000000..00db5220a
--- /dev/null
+++ b/testing/mozharness/configs/test/test_override.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+config = {
+ "override_string": "TODO",
+ "override_list": ['to', 'do'],
+ "override_dict": {'to': 'do'},
+ "keep_string": "don't change me",
+}
diff --git a/testing/mozharness/configs/test/test_override2.py b/testing/mozharness/configs/test/test_override2.py
new file mode 100644
index 000000000..27091d453
--- /dev/null
+++ b/testing/mozharness/configs/test/test_override2.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+config = {
+ "override_string": "yay",
+ "override_list": ["yay", 'worked'],
+ "override_dict": {"yay": 'worked'},
+}
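Together, test_override.py and test_override2.py pin down the override semantics: strings, lists, and dicts are replaced wholesale by a shallow update, and keys missing from the second file are left alone:

    import runpy

    cfg = runpy.run_path("test_override.py")["config"]
    cfg.update(runpy.run_path("test_override2.py")["config"])
    print(cfg["override_list"])  # -> ['yay', 'worked'] (replaced, not merged)
    print(cfg["keep_string"])    # -> "don't change me"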
diff --git a/testing/mozharness/configs/unittests/linux_unittest.py b/testing/mozharness/configs/unittests/linux_unittest.py
new file mode 100644
index 000000000..77e4ed501
--- /dev/null
+++ b/testing/mozharness/configs/unittests/linux_unittest.py
@@ -0,0 +1,306 @@
+import os
+import platform
+
+# OS Specifics
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+BINARY_PATH = os.path.join(ABS_WORK_DIR, "application", "firefox", "firefox-bin")
+INSTALLER_PATH = os.path.join(ABS_WORK_DIR, "installer.tar.bz2")
+XPCSHELL_NAME = "xpcshell"
+EXE_SUFFIX = ""
+DISABLE_SCREEN_SAVER = True
+ADJUST_MOUSE_AND_SCREEN = False
+
+# Note: keep these Valgrind .sup file names consistent with those
+# in testing/mochitest/mochitest_options.py.
+VALGRIND_SUPP_DIR = os.path.join(os.getcwd(), "build/tests/mochitest")
+VALGRIND_SUPP_CROSS_ARCH = os.path.join(VALGRIND_SUPP_DIR,
+ "cross-architecture.sup")
+VALGRIND_SUPP_ARCH = None
+
+if platform.architecture()[0] == "64bit":
+ TOOLTOOL_MANIFEST_PATH = "config/tooltool-manifests/linux64/releng.manifest"
+ MINIDUMP_STACKWALK_PATH = "linux64-minidump_stackwalk"
+ VALGRIND_SUPP_ARCH = os.path.join(VALGRIND_SUPP_DIR,
+ "x86_64-redhat-linux-gnu.sup")
+ NODEJS_PATH = "node-linux-x64/bin/node"
+ NODEJS_TOOLTOOL_MANIFEST_PATH = "config/tooltool-manifests/linux64/nodejs.manifest"
+else:
+ TOOLTOOL_MANIFEST_PATH = "config/tooltool-manifests/linux32/releng.manifest"
+ MINIDUMP_STACKWALK_PATH = "linux32-minidump_stackwalk"
+ VALGRIND_SUPP_ARCH = os.path.join(VALGRIND_SUPP_DIR,
+ "i386-redhat-linux-gnu.sup")
+ NODEJS_PATH = "node-linux-x86/bin/node"
+ NODEJS_TOOLTOOL_MANIFEST_PATH = "config/tooltool-manifests/linux32/nodejs.manifest"
+
+#####
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "exes": {
+ "python": "/tools/buildbot/bin/python",
+ "virtualenv": ["/tools/buildbot/bin/python", "/tools/misc-python/virtualenv.py"],
+ "tooltool.py": "/tools/tooltool.py",
+ },
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ ###
+ "installer_path": INSTALLER_PATH,
+ "binary_path": BINARY_PATH,
+ "xpcshell_name": XPCSHELL_NAME,
+ "exe_suffix": EXE_SUFFIX,
+ "run_file_names": {
+ "mochitest": "runtests.py",
+ "reftest": "runreftest.py",
+ "xpcshell": "runxpcshelltests.py",
+ "cppunittest": "runcppunittests.py",
+ "gtest": "rungtests.py",
+ "jittest": "jit_test.py",
+ "mozbase": "test.py",
+ "mozmill": "runtestlist.py",
+ },
+ "minimum_tests_zip_dirs": [
+ "bin/*",
+ "certs/*",
+ "config/*",
+ "mach",
+ "marionette/*",
+ "modules/*",
+ "mozbase/*",
+ "tools/*",
+ ],
+ "specific_tests_zip_dirs": {
+ "mochitest": ["mochitest/*"],
+ "reftest": ["reftest/*", "jsreftest/*"],
+ "xpcshell": ["xpcshell/*"],
+ "cppunittest": ["cppunittest/*"],
+ "gtest": ["gtest/*"],
+ "jittest": ["jit-test/*"],
+ "mozbase": ["mozbase/*"],
+ "mozmill": ["mozmill/*"],
+ },
+ "suite_definitions": {
+ "cppunittest": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--xre-path=%(abs_app_dir)s"
+ ],
+ "run_filename": "runcppunittests.py",
+ "testsdir": "cppunittest"
+ },
+ "jittest": {
+ "options": [
+ "tests/bin/js",
+ "--no-slow",
+ "--no-progress",
+ "--format=automation",
+ "--jitflags=all",
+ "--timeout=970" # Keep in sync with run_timeout below.
+ ],
+ "run_filename": "jit_test.py",
+ "testsdir": "jit-test/jit-test",
+ "run_timeout": 1000 # Keep in sync with --timeout above.
+ },
+ "mochitest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--certificate-path=tests/certs",
+ "--setpref=webgl.force-enabled=true",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--use-test-media-devices",
+ "--screenshot-on-fail",
+ "--cleanup-crashes",
+ "--marionette-startup-timeout=180",
+ ],
+ "run_filename": "runtests.py",
+ "testsdir": "mochitest"
+ },
+ "mozbase": {
+ "options": [
+ "-b",
+ "%(binary_path)s"
+ ],
+ "run_filename": "test.py",
+ "testsdir": "mozbase"
+ },
+ "mozmill": {
+ "options": [
+ "--binary=%(binary_path)s",
+ "--testing-modules-dir=test/modules",
+ "--plugins-path=%(test_plugin_path)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "runtestlist.py",
+ "testsdir": "mozmill"
+ },
+ "reftest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runreftest.py",
+ "testsdir": "reftest"
+ },
+ "xpcshell": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--test-plugin-path=%(test_plugin_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--utility-path=tests/bin",
+ ],
+ "run_filename": "runxpcshelltests.py",
+ "testsdir": "xpcshell"
+ },
+ "gtest": {
+ "options": [
+ "--xre-path=%(abs_res_dir)s",
+ "--cwd=%(gtest_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--utility-path=tests/bin",
+ "%(binary_path)s",
+ ],
+ "run_filename": "rungtests.py",
+ },
+ },
+ # local mochi suites
+ "all_mochitest_suites": {
+ "valgrind-plain": ["--valgrind=/usr/bin/valgrind",
+ "--valgrind-supp-files=" + VALGRIND_SUPP_ARCH +
+ "," + VALGRIND_SUPP_CROSS_ARCH,
+ "--timeout=900", "--max-timeouts=50"],
+ "plain": [],
+ "plain-gpu": ["--subsuite=gpu"],
+ "plain-clipboard": ["--subsuite=clipboard"],
+ "plain-chunked": ["--chunk-by-dir=4"],
+ "mochitest-media": ["--subsuite=media"],
+ "chrome": ["--flavor=chrome"],
+ "chrome-gpu": ["--flavor=chrome", "--subsuite=gpu"],
+ "chrome-clipboard": ["--flavor=chrome", "--subsuite=clipboard"],
+ "chrome-chunked": ["--flavor=chrome", "--chunk-by-dir=4"],
+ "browser-chrome": ["--flavor=browser"],
+ "browser-chrome-gpu": ["--flavor=browser", "--subsuite=gpu"],
+ "browser-chrome-clipboard": ["--flavor=browser", "--subsuite=clipboard"],
+ "browser-chrome-chunked": ["--flavor=browser", "--chunk-by-runtime"],
+ "browser-chrome-addons": ["--flavor=browser", "--chunk-by-runtime", "--tag=addons"],
+ "browser-chrome-coverage": ["--flavor=browser", "--chunk-by-runtime", "--timeout=1200"],
+ "browser-chrome-screenshots": ["--flavor=browser", "--subsuite=screenshots"],
+ "mochitest-gl": ["--subsuite=webgl"],
+ "mochitest-devtools-chrome": ["--flavor=browser", "--subsuite=devtools"],
+ "mochitest-devtools-chrome-chunked": ["--flavor=browser", "--subsuite=devtools", "--chunk-by-runtime"],
+ "mochitest-devtools-chrome-coverage": ["--flavor=browser", "--subsuite=devtools", "--chunk-by-runtime", "--timeout=1200"],
+ "jetpack-package": ["--flavor=jetpack-package"],
+ "jetpack-package-clipboard": ["--flavor=jetpack-package", "--subsuite=clipboard"],
+ "jetpack-addon": ["--flavor=jetpack-addon"],
+ "a11y": ["--flavor=a11y"],
+ },
+ # local reftest suites
+ "all_reftest_suites": {
+ "crashtest": {
+ "options": ["--suite=crashtest"],
+ "tests": ["tests/reftest/tests/testing/crashtest/crashtests.list"]
+ },
+ "jsreftest": {
+ "options": ["--extra-profile-file=tests/jsreftest/tests/user.js",
+ "--suite=jstestbrowser"],
+ "tests": ["tests/jsreftest/tests/jstests.list"]
+ },
+ "reftest": {
+ "options": ["--suite=reftest"],
+ "tests": ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ "reftest-no-accel": {
+ "options": ["--suite=reftest",
+ "--setpref=layers.acceleration.force-enabled=disabled"],
+ "tests": ["tests/reftest/tests/layout/reftests/reftest.list"]},
+ },
+ "all_xpcshell_suites": {
+ "xpcshell": {
+ "options": ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ "tests": []
+ },
+ "xpcshell-addons": {
+ "options": ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--tag=addons",
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ "tests": []
+ },
+ "xpcshell-coverage": {
+ "options": ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ "tests": []
+ },
+ },
+ "all_cppunittest_suites": {
+ "cppunittest": {"tests": ["tests/cppunittest"]}
+ },
+ "all_gtest_suites": {
+ "gtest": []
+ },
+ "all_jittest_suites": {
+ "jittest": [],
+ "jittest1": ["--total-chunks=2", "--this-chunk=1"],
+ "jittest2": ["--total-chunks=2", "--this-chunk=2"],
+ "jittest-chunked": [],
+ },
+ "all_mozbase_suites": {
+ "mozbase": []
+ },
+ "run_cmd_checks_enabled": True,
+ "preflight_run_cmd_suites": [
+ # NOTE 'enabled' is only here while we have unconsolidated configs
+ {
+ "name": "disable_screen_saver",
+ "cmd": ["xset", "s", "off", "s", "reset"],
+ "halt_on_failure": False,
+ "architectures": ["32bit", "64bit"],
+ "enabled": DISABLE_SCREEN_SAVER
+ },
+ {
+ "name": "run mouse & screen adjustment script",
+ "cmd": [
+ # When configs are consolidated, this Python path will only be
+ # needed on Windows.
+ "python", "../scripts/external_tools/mouse_and_screen_resolution.py",
+ "--configuration-file",
+ "../scripts/external_tools/machine-configuration.json"],
+ "architectures": ["32bit"],
+ "halt_on_failure": True,
+ "enabled": ADJUST_MOUSE_AND_SCREEN
+ },
+ ],
+ "vcs_output_timeout": 1000,
+ "minidump_save_path": "%(abs_work_dir)s/../minidumps",
+ "buildbot_max_log_size": 52428800,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "unstructured_flavors": {"mochitest": ['jetpack'],
+ "xpcshell": [],
+ "gtest": [],
+ "mozmill": [],
+ "cppunittest": [],
+ "jittest": [],
+ "mozbase": [],
+ },
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": MINIDUMP_STACKWALK_PATH,
+ "minidump_tooltool_manifest_path": TOOLTOOL_MANIFEST_PATH,
+ "tooltool_cache": "/builds/tooltool_cache",
+ "download_nodejs": True,
+ "nodejs_path": NODEJS_PATH,
+ "nodejs_tooltool_manifest_path": NODEJS_TOOLTOOL_MANIFEST_PATH,
+}
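
A note on the `%(symbols_path)s`-style placeholders in `suite_definitions` above: they are expanded with ordinary Python %-formatting before the per-suite flags (for example `plain-chunked`'s `--chunk-by-dir=4`) are appended. A minimal sketch of that expansion; the `build_command` helper and all concrete values here are invented for illustration, not mozharness APIs:

import os

# One suite definition in the shape used above; values invented for the demo.
suite = {
    "options": [
        "--appname=%(binary_path)s",
        "--symbols-path=%(symbols_path)s",
    ],
    "run_filename": "runtests.py",
    "testsdir": "mochitest",
}

values = {
    "binary_path": "/builds/slave/test/build/firefox/firefox",
    "symbols_path": "/builds/slave/test/build/symbols",
}

def build_command(suite, values, suite_args):
    # build_command is a made-up helper, not a mozharness API.
    script = os.path.join("tests", suite["testsdir"], suite["run_filename"])
    options = [opt % values for opt in suite["options"]]
    return ["python", script] + options + suite_args

# e.g. the "plain-chunked" entry from all_mochitest_suites
print(build_command(suite, values, ["--chunk-by-dir=4"]))
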
diff --git a/testing/mozharness/configs/unittests/mac_unittest.py b/testing/mozharness/configs/unittests/mac_unittest.py
new file mode 100644
index 000000000..20bbcf9f5
--- /dev/null
+++ b/testing/mozharness/configs/unittests/mac_unittest.py
@@ -0,0 +1,257 @@
+import os
+
+# OS Specifics
+INSTALLER_PATH = os.path.join(os.getcwd(), "installer.dmg")
+XPCSHELL_NAME = 'xpcshell'
+EXE_SUFFIX = ''
+DISABLE_SCREEN_SAVER = False
+ADJUST_MOUSE_AND_SCREEN = False
+#####
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "exes": {
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ ###
+ "installer_path": INSTALLER_PATH,
+ "xpcshell_name": XPCSHELL_NAME,
+ "exe_suffix": EXE_SUFFIX,
+ "run_file_names": {
+ "mochitest": "runtests.py",
+ "reftest": "runreftest.py",
+ "xpcshell": "runxpcshelltests.py",
+ "cppunittest": "runcppunittests.py",
+ "gtest": "rungtests.py",
+ "jittest": "jit_test.py",
+ "mozbase": "test.py",
+ "mozmill": "runtestlist.py",
+ },
+ "minimum_tests_zip_dirs": [
+ "bin/*",
+ "certs/*",
+ "config/*",
+ "mach",
+ "marionette/*",
+ "modules/*",
+ "mozbase/*",
+ "tools/*",
+ ],
+ "specific_tests_zip_dirs": {
+ "mochitest": ["mochitest/*"],
+ "reftest": ["reftest/*", "jsreftest/*"],
+ "xpcshell": ["xpcshell/*"],
+ "cppunittest": ["cppunittest/*"],
+ "gtest": ["gtest/*"],
+ "jittest": ["jit-test/*"],
+ "mozbase": ["mozbase/*"],
+ "mozmill": ["mozmill/*"],
+ },
+ "suite_definitions": {
+ "cppunittest": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--xre-path=%(abs_res_dir)s"
+ ],
+ "run_filename": "runcppunittests.py",
+ "testsdir": "cppunittest"
+ },
+ "jittest": {
+ "options": [
+ "tests/bin/js",
+ "--no-slow",
+ "--no-progress",
+ "--format=automation",
+ "--jitflags=all",
+ "--timeout=970" # Keep in sync with run_timeout below.
+ ],
+ "run_filename": "jit_test.py",
+ "testsdir": "jit-test/jit-test",
+ "run_timeout": 1000 # Keep in sync with --timeout above.
+ },
+ "mochitest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--certificate-path=tests/certs",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runtests.py",
+ "testsdir": "mochitest"
+ },
+ "mozbase": {
+ "options": [
+ "-b",
+ "%(binary_path)s"
+ ],
+ "run_filename": "test.py",
+ "testsdir": "mozbase"
+ },
+ "mozmill": {
+ "options": [
+ "--binary=%(binary_path)s",
+ "--testing-modules-dir=test/modules",
+ "--plugins-path=%(test_plugin_path)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "runtestlist.py",
+ "testsdir": "mozmill"
+ },
+ "reftest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runreftest.py",
+ "testsdir": "reftest"
+ },
+ "xpcshell": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--test-plugin-path=%(test_plugin_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--utility-path=tests/bin",
+ ],
+ "run_filename": "runxpcshelltests.py",
+ "testsdir": "xpcshell"
+ },
+ "gtest": {
+ "options": [
+ "--xre-path=%(abs_res_dir)s",
+ "--cwd=%(gtest_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--utility-path=tests/bin",
+ "%(binary_path)s",
+ ],
+ "run_filename": "rungtests.py",
+ },
+ },
+ # local mochi suites
+ "all_mochitest_suites": {
+ "plain": [],
+ "plain-gpu": ["--subsuite=gpu"],
+ "plain-clipboard": ["--subsuite=clipboard"],
+ "plain-chunked": ["--chunk-by-dir=4"],
+ "mochitest-media": ["--subsuite=media"],
+ "chrome": ["--flavor=chrome"],
+ "chrome-gpu": ["--flavor=chrome", "--subsuite=gpu"],
+ "chrome-clipboard": ["--flavor=chrome", "--subsuite=clipboard"],
+ "chrome-chunked": ["--flavor=chrome", "--chunk-by-dir=4"],
+ "browser-chrome": ["--flavor=browser"],
+ "browser-chrome-gpu": ["--flavor=browser", "--subsuite=gpu"],
+ "browser-chrome-clipboard": ["--flavor=browser", "--subsuite=clipboard"],
+ "browser-chrome-chunked": ["--flavor=browser", "--chunk-by-runtime"],
+ "browser-chrome-addons": ["--flavor=browser", "--chunk-by-runtime", "--tag=addons"],
+ "browser-chrome-screenshots": ["--flavor=browser", "--subsuite=screenshots"],
+ "mochitest-gl": ["--subsuite=webgl"],
+ "mochitest-devtools-chrome": ["--flavor=browser", "--subsuite=devtools"],
+ "mochitest-devtools-chrome-chunked": ["--flavor=browser", "--subsuite=devtools", "--chunk-by-runtime"],
+ "jetpack-package": ["--flavor=jetpack-package"],
+ "jetpack-package-clipboard": ["--flavor=jetpack-package", "--subsuite=clipboard"],
+ "jetpack-addon": ["--flavor=jetpack-addon"],
+ "a11y": ["--flavor=a11y"],
+ },
+ # local reftest suites
+ "all_reftest_suites": {
+ "crashtest": {
+ 'options': ["--suite=crashtest"],
+ 'tests': ["tests/reftest/tests/testing/crashtest/crashtests.list"]
+ },
+ "jsreftest": {
+ 'options':["--extra-profile-file=tests/jsreftest/tests/user.js"],
+ 'tests': ["tests/jsreftest/tests/jstests.list"]
+ },
+ "reftest": {
+ 'options': ["--suite=reftest"],
+ 'tests': ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ },
+ "all_xpcshell_suites": {
+ "xpcshell": {
+ 'options': ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ 'tests': []
+ },
+ "xpcshell-addons": {
+ 'options': ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--tag=addons",
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ 'tests': []
+ },
+ },
+ "all_cppunittest_suites": {
+ "cppunittest": ['tests/cppunittest']
+ },
+ "all_gtest_suites": {
+ "gtest": []
+ },
+ "all_jittest_suites": {
+ "jittest": []
+ },
+ "all_mozbase_suites": {
+ "mozbase": []
+ },
+ "run_cmd_checks_enabled": True,
+ "preflight_run_cmd_suites": [
+ # NOTE 'enabled' is only here while we have unconsolidated configs
+ {
+ "name": "disable_screen_saver",
+ "cmd": ["xset", "s", "off", "s", "reset"],
+ "architectures": ["32bit", "64bit"],
+ "halt_on_failure": False,
+ "enabled": DISABLE_SCREEN_SAVER
+ },
+ {
+ "name": "run mouse & screen adjustment script",
+ "cmd": [
+ # When configs are consolidated, this Python path will only be
+ # needed on Windows.
+ "python", "../scripts/external_tools/mouse_and_screen_resolution.py",
+ "--configuration-file",
+ "../scripts/external_tools/machine-configuration.json"],
+ "architectures": ["32bit"],
+ "halt_on_failure": True,
+ "enabled": ADJUST_MOUSE_AND_SCREEN
+ },
+ ],
+ "vcs_output_timeout": 1000,
+ "minidump_save_path": "%(abs_work_dir)s/../minidumps",
+ "buildbot_max_log_size": 52428800,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "unstructured_flavors": {"mochitest": ['jetpack'],
+ "xpcshell": [],
+ "gtest": [],
+ "mozmill": [],
+ "cppunittest": [],
+ "jittest": [],
+ "mozbase": [],
+ },
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": "macosx64-minidump_stackwalk",
+ "minidump_tooltool_manifest_path": "config/tooltool-manifests/macosx64/releng.manifest",
+ "tooltool_cache": "/builds/tooltool_cache",
+ "download_nodejs": True,
+ "nodejs_path": "node-osx/bin/node",
+ "nodejs_tooltool_manifest_path": "config/tooltool-manifests/macosx64/nodejs.manifest",
+}
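
Note that the `exes` map in these configs mixes two shapes: a bare path string (`tooltool.py` here) and an argv-prefix list (`virtualenv`). Any consumer has to normalize both. A minimal sketch of that normalization; `query_exe` below is an illustrative stand-in, not mozharness's richer method of the same name:

def query_exe(config, name):
    # Illustrative stand-in; mozharness's real query_exe does more
    # (substitution, error checking).
    exe = config["exes"].get(name, name)
    if isinstance(exe, (list, tuple)):
        return list(exe)  # already an argv prefix
    return [exe]          # wrap a bare path in a one-element argv

cfg = {"exes": {
    "python": "/tools/buildbot/bin/python",
    "virtualenv": ["/tools/buildbot/bin/python",
                   "/tools/misc-python/virtualenv.py"],
}}
print(query_exe(cfg, "python") + ["--version"])
print(query_exe(cfg, "virtualenv") + ["venv"])
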
diff --git a/testing/mozharness/configs/unittests/thunderbird_extra.py b/testing/mozharness/configs/unittests/thunderbird_extra.py
new file mode 100644
index 000000000..2021b9d55
--- /dev/null
+++ b/testing/mozharness/configs/unittests/thunderbird_extra.py
@@ -0,0 +1,17 @@
+#####
+config = {
+ "application": "thunderbird",
+ "minimum_tests_zip_dirs": [
+ "bin/*",
+ "certs/*",
+ "config/*",
+ "extensions/*",
+ "marionette/*",
+ "modules/*",
+ "mozbase/*",
+ "tools/*",
+ ],
+ "all_mozmill_suites": {
+ "mozmill": ["--list=tests/mozmill/mozmilltests.list"],
+ },
+}
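
This fragment is meant to be layered over a platform config: mozharness accepts several --cfg files and merges them in order, with later files overriding earlier keys, which is how "application" and the mozmill suite here replace the Firefox defaults. A rough sketch of that shallow merge (simplified; the real logic lives in mozharness.base.config):

def merge_configs(*configs):
    # Shallow merge: whole top-level keys are replaced, not deep-merged.
    merged = {}
    for cfg in configs:
        merged.update(cfg)
    return merged

platform_cfg = {"application": "firefox", "pip_index": False}
thunderbird_extra = {"application": "thunderbird"}

combined = merge_configs(platform_cfg, thunderbird_extra)
assert combined["application"] == "thunderbird"  # later file wins
assert combined["pip_index"] is False            # untouched keys survive
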
diff --git a/testing/mozharness/configs/unittests/win_taskcluster_unittest.py b/testing/mozharness/configs/unittests/win_taskcluster_unittest.py
new file mode 100644
index 000000000..161e8e65e
--- /dev/null
+++ b/testing/mozharness/configs/unittests/win_taskcluster_unittest.py
@@ -0,0 +1,274 @@
+import os
+import sys
+
+# OS Specifics
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+BINARY_PATH = os.path.join(ABS_WORK_DIR, "firefox", "firefox.exe")
+INSTALLER_PATH = os.path.join(ABS_WORK_DIR, "installer.zip")
+XPCSHELL_NAME = 'xpcshell.exe'
+EXE_SUFFIX = '.exe'
+DISABLE_SCREEN_SAVER = False
+ADJUST_MOUSE_AND_SCREEN = True
+#####
+config = {
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(os.path.dirname(sys.executable), 'Lib', 'site-packages', 'virtualenv.py')
+ ],
+ 'mozinstall': ['build/venv/scripts/python', 'build/venv/scripts/mozinstall-script.py'],
+ 'tooltool.py': [sys.executable, os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')],
+ 'hg': os.path.join(os.environ['PROGRAMFILES'], 'Mercurial', 'hg')
+ },
+ ###
+ "installer_path": INSTALLER_PATH,
+ "binary_path": BINARY_PATH,
+ "xpcshell_name": XPCSHELL_NAME,
+ "virtualenv_path": 'venv',
+ "virtualenv_python_dll": os.path.join(os.path.dirname(sys.executable), "python27.dll"),
+
+ "proxxy": {},
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ "exe_suffix": EXE_SUFFIX,
+ "run_file_names": {
+ "mochitest": "runtests.py",
+ "reftest": "runreftest.py",
+ "xpcshell": "runxpcshelltests.py",
+ "cppunittest": "runcppunittests.py",
+ "gtest": "rungtests.py",
+ "jittest": "jit_test.py",
+ "mozbase": "test.py",
+ "mozmill": "runtestlist.py",
+ },
+ "minimum_tests_zip_dirs": [
+ "bin/*",
+ "certs/*",
+ "config/*",
+ "mach",
+ "marionette/*",
+ "modules/*",
+ "mozbase/*",
+ "tools/*",
+ ],
+ "specific_tests_zip_dirs": {
+ "mochitest": ["mochitest/*"],
+ "reftest": ["reftest/*", "jsreftest/*"],
+ "xpcshell": ["xpcshell/*"],
+ "cppunittest": ["cppunittest/*"],
+ "gtest": ["gtest/*"],
+ "jittest": ["jit-test/*"],
+ "mozbase": ["mozbase/*"],
+ "mozmill": ["mozmill/*"],
+ },
+ "suite_definitions": {
+ "cppunittest": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--xre-path=%(abs_app_dir)s"
+ ],
+ "run_filename": "runcppunittests.py",
+ "testsdir": "cppunittest"
+ },
+ "jittest": {
+ "options": [
+ "tests/bin/js",
+ "--no-slow",
+ "--no-progress",
+ "--format=automation",
+ "--jitflags=all",
+ "--timeout=970" # Keep in sync with run_timeout below.
+ ],
+ "run_filename": "jit_test.py",
+ "testsdir": "jit-test/jit-test",
+ "run_timeout": 1000 # Keep in sync with --timeout above.
+ },
+ "mochitest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--certificate-path=tests/certs",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runtests.py",
+ "testsdir": "mochitest"
+ },
+ "mozbase": {
+ "options": [
+ "-b",
+ "%(binary_path)s"
+ ],
+ "run_filename": "test.py",
+ "testsdir": "mozbase"
+ },
+ "mozmill": {
+ "options": [
+ "--binary=%(binary_path)s",
+ "--testing-modules-dir=test/modules",
+ "--plugins-path=%(test_plugin_path)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "runtestlist.py",
+ "testsdir": "mozmill"
+ },
+ "reftest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runreftest.py",
+ "testsdir": "reftest"
+ },
+ "xpcshell": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--test-plugin-path=%(test_plugin_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--utility-path=tests/bin",
+ ],
+ "run_filename": "runxpcshelltests.py",
+ "testsdir": "xpcshell"
+ },
+ "gtest": {
+ "options": [
+ "--xre-path=%(abs_res_dir)s",
+ "--cwd=%(gtest_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--utility-path=tests/bin",
+ "%(binary_path)s",
+ ],
+ "run_filename": "rungtests.py",
+ },
+ },
+ # local mochi suites
+ "all_mochitest_suites":
+ {
+ "plain": [],
+ "plain-gpu": ["--subsuite=gpu"],
+ "plain-clipboard": ["--subsuite=clipboard"],
+ "plain-chunked": ["--chunk-by-dir=4"],
+ "mochitest-media": ["--subsuite=media"],
+ "chrome": ["--flavor=chrome"],
+ "chrome-gpu": ["--flavor=chrome", "--subsuite=gpu"],
+ "chrome-clipboard": ["--flavor=chrome", "--subsuite=clipboard"],
+ "chrome-chunked": ["--flavor=chrome", "--chunk-by-dir=4"],
+ "browser-chrome": ["--flavor=browser"],
+ "browser-chrome-gpu": ["--flavor=browser", "--subsuite=gpu"],
+ "browser-chrome-clipboard": ["--flavor=browser", "--subsuite=clipboard"],
+ "browser-chrome-chunked": ["--flavor=browser", "--chunk-by-runtime"],
+ "browser-chrome-addons": ["--flavor=browser", "--chunk-by-runtime", "--tag=addons"],
+ "browser-chrome-screenshots": ["--flavor=browser", "--subsuite=screenshots"],
+ "mochitest-gl": ["--subsuite=webgl"],
+ "mochitest-devtools-chrome": ["--flavor=browser", "--subsuite=devtools"],
+ "mochitest-devtools-chrome-chunked": ["--flavor=browser", "--subsuite=devtools", "--chunk-by-runtime"],
+ "mochitest-metro-chrome": ["--flavor=browser", "--metro-immersive"],
+ "jetpack-package": ["--flavor=jetpack-package"],
+ "jetpack-package-clipboard": ["--flavor=jetpack-package", "--subsuite=clipboard"],
+ "jetpack-addon": ["--flavor=jetpack-addon"],
+ "a11y": ["--flavor=a11y"],
+ },
+ # local reftest suites
+ "all_reftest_suites": {
+ "crashtest": {
+ 'options': ["--suite=crashtest"],
+ 'tests': ["tests/reftest/tests/testing/crashtest/crashtests.list"]
+ },
+ "jsreftest": {
+ 'options':["--extra-profile-file=tests/jsreftest/tests/user.js"],
+ 'tests': ["tests/jsreftest/tests/jstests.list"]
+ },
+ "reftest": {
+ 'options': ["--suite=reftest"],
+ 'tests': ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ "reftest-gpu": {
+ 'options': ["--suite=reftest",
+ "--setpref=layers.gpu-process.force-enabled=true"],
+ 'tests': ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ "reftest-no-accel": {
+ "options": ["--suite=reftest",
+ "--setpref=gfx.direct2d.disabled=true",
+ "--setpref=layers.acceleration.disabled=true"],
+ "tests": ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ },
+ "all_xpcshell_suites": {
+ "xpcshell": {
+ 'options': ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ 'tests': []
+ },
+ "xpcshell-addons": {
+ 'options': ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--tag=addons",
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ 'tests': []
+ },
+ },
+ "all_cppunittest_suites": {
+ "cppunittest": ['tests/cppunittest']
+ },
+ "all_gtest_suites": {
+ "gtest": []
+ },
+ "all_jittest_suites": {
+ "jittest": []
+ },
+ "all_mozbase_suites": {
+ "mozbase": []
+ },
+ "run_cmd_checks_enabled": True,
+ "preflight_run_cmd_suites": [
+ {
+ 'name': 'disable_screen_saver',
+ 'cmd': ['xset', 's', 'off', 's', 'reset'],
+ 'architectures': ['32bit', '64bit'],
+ 'halt_on_failure': False,
+ 'enabled': DISABLE_SCREEN_SAVER
+ },
+ {
+ 'name': 'run mouse & screen adjustment script',
+ 'cmd': [
+ sys.executable,
+ os.path.join(os.getcwd(),
+ 'mozharness', 'external_tools', 'mouse_and_screen_resolution.py'),
+ '--configuration-file',
+ os.path.join(os.getcwd(),
+ 'mozharness', 'external_tools', 'machine-configuration.json')
+ ],
+ 'architectures': ['32bit'],
+ 'halt_on_failure': True,
+ 'enabled': ADJUST_MOUSE_AND_SCREEN
+ }
+ ],
+ "vcs_output_timeout": 1000,
+ "minidump_save_path": "%(abs_work_dir)s/../minidumps",
+ "buildbot_max_log_size": 52428800,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "structured_suites": ["reftest"],
+ 'blob_uploader_auth_file': 'C:/builds/oauth.txt',
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": "win32-minidump_stackwalk.exe",
+ "minidump_tooltool_manifest_path": "config/tooltool-manifests/win32/releng.manifest",
+ "download_nodejs": True,
+ "nodejs_path": "node-win32.exe",
+ "nodejs_tooltool_manifest_path": "config/tooltool-manifests/win32/nodejs.manifest",
+}
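
Each entry in `preflight_run_cmd_suites` carries its own gating fields (`architectures`, `enabled`, `halt_on_failure`). Conceptually the harness filters and runs them like the sketch below; this is a simplification with error handling and logging omitted, not the mozharness implementation:

import platform
import subprocess

def run_preflight(suites):
    arch = platform.architecture()[0]  # "32bit" or "64bit"
    for suite in suites:
        if not suite.get("enabled", True):
            continue  # e.g. DISABLE_SCREEN_SAVER is False on Windows
        if arch not in suite.get("architectures", []):
            continue
        status = subprocess.call(suite["cmd"])
        if status != 0 and suite.get("halt_on_failure"):
            raise RuntimeError("preflight %r failed" % suite["name"])

run_preflight([{
    "name": "disable_screen_saver",
    "cmd": ["xset", "s", "off", "s", "reset"],
    "architectures": ["32bit", "64bit"],
    "halt_on_failure": False,
    "enabled": False,  # disabled here, so the command never runs
}])
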
diff --git a/testing/mozharness/configs/unittests/win_unittest.py b/testing/mozharness/configs/unittests/win_unittest.py
new file mode 100644
index 000000000..caa2978c6
--- /dev/null
+++ b/testing/mozharness/configs/unittests/win_unittest.py
@@ -0,0 +1,281 @@
+import os
+import sys
+
+# OS Specifics
+ABS_WORK_DIR = os.path.join(os.getcwd(), "build")
+BINARY_PATH = os.path.join(ABS_WORK_DIR, "application", "firefox", "firefox.exe")
+INSTALLER_PATH = os.path.join(ABS_WORK_DIR, "installer.zip")
+XPCSHELL_NAME = 'xpcshell.exe'
+EXE_SUFFIX = '.exe'
+DISABLE_SCREEN_SAVER = False
+ADJUST_MOUSE_AND_SCREEN = True
+#####
+config = {
+ "buildbot_json_path": "buildprops.json",
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [sys.executable, 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ 'hg': 'c:/mozilla-build/hg/hg',
+ 'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
+ '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
+ 'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
+ },
+ ###
+ "installer_path": INSTALLER_PATH,
+ "binary_path": BINARY_PATH,
+ "xpcshell_name": XPCSHELL_NAME,
+ "virtualenv_path": 'venv',
+ "virtualenv_python_dll": os.path.join(os.path.dirname(sys.executable), "python27.dll"),
+ "virtualenv_modules": ['pypiwin32'],
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ "exe_suffix": EXE_SUFFIX,
+ "run_file_names": {
+ "mochitest": "runtests.py",
+ "reftest": "runreftest.py",
+ "xpcshell": "runxpcshelltests.py",
+ "cppunittest": "runcppunittests.py",
+ "gtest": "rungtests.py",
+ "jittest": "jit_test.py",
+ "mozbase": "test.py",
+ "mozmill": "runtestlist.py",
+ },
+ "minimum_tests_zip_dirs": [
+ "bin/*",
+ "certs/*",
+ "config/*",
+ "mach",
+ "marionette/*",
+ "modules/*",
+ "mozbase/*",
+ "tools/*",
+ ],
+ "specific_tests_zip_dirs": {
+ "mochitest": ["mochitest/*"],
+ "reftest": ["reftest/*", "jsreftest/*"],
+ "xpcshell": ["xpcshell/*"],
+ "cppunittest": ["cppunittest/*"],
+ "gtest": ["gtest/*"],
+ "jittest": ["jit-test/*"],
+ "mozbase": ["mozbase/*"],
+ "mozmill": ["mozmill/*"],
+ },
+ "suite_definitions": {
+ "cppunittest": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--xre-path=%(abs_app_dir)s"
+ ],
+ "run_filename": "runcppunittests.py",
+ "testsdir": "cppunittest"
+ },
+ "jittest": {
+ "options": [
+ "tests/bin/js",
+ "--no-slow",
+ "--no-progress",
+ "--format=automation",
+ "--jitflags=all",
+ "--timeout=970" # Keep in sync with run_timeout below.
+ ],
+ "run_filename": "jit_test.py",
+ "testsdir": "jit-test/jit-test",
+ "run_timeout": 1000 # Keep in sync with --timeout above.
+ },
+ "mochitest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--certificate-path=tests/certs",
+ "--quiet",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--screenshot-on-fail",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runtests.py",
+ "testsdir": "mochitest"
+ },
+ "mozbase": {
+ "options": [
+ "-b",
+ "%(binary_path)s"
+ ],
+ "run_filename": "test.py",
+ "testsdir": "mozbase"
+ },
+ "mozmill": {
+ "options": [
+ "--binary=%(binary_path)s",
+ "--testing-modules-dir=test/modules",
+ "--plugins-path=%(test_plugin_path)s",
+ "--symbols-path=%(symbols_path)s"
+ ],
+ "run_filename": "runtestlist.py",
+ "testsdir": "mozmill"
+ },
+ "reftest": {
+ "options": [
+ "--appname=%(binary_path)s",
+ "--utility-path=tests/bin",
+ "--extra-profile-file=tests/bin/plugins",
+ "--symbols-path=%(symbols_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--cleanup-crashes",
+ ],
+ "run_filename": "runreftest.py",
+ "testsdir": "reftest"
+ },
+ "xpcshell": {
+ "options": [
+ "--symbols-path=%(symbols_path)s",
+ "--test-plugin-path=%(test_plugin_path)s",
+ "--log-raw=%(raw_log_file)s",
+ "--log-errorsummary=%(error_summary_file)s",
+ "--utility-path=tests/bin",
+ ],
+ "run_filename": "runxpcshelltests.py",
+ "testsdir": "xpcshell"
+ },
+ "gtest": {
+ "options": [
+ "--xre-path=%(abs_res_dir)s",
+ "--cwd=%(gtest_dir)s",
+ "--symbols-path=%(symbols_path)s",
+ "--utility-path=tests/bin",
+ "%(binary_path)s",
+ ],
+ "run_filename": "rungtests.py",
+ },
+ },
+ # local mochi suites
+ "all_mochitest_suites":
+ {
+ "plain": [],
+ "plain-gpu": ["--subsuite=gpu"],
+ "plain-clipboard": ["--subsuite=clipboard"],
+ "plain-chunked": ["--chunk-by-dir=4"],
+ "mochitest-media": ["--subsuite=media"],
+ "chrome": ["--flavor=chrome"],
+ "chrome-gpu": ["--flavor=chrome", "--subsuite=gpu"],
+ "chrome-clipboard": ["--flavor=chrome", "--subsuite=clipboard"],
+ "chrome-chunked": ["--flavor=chrome", "--chunk-by-dir=4"],
+ "browser-chrome": ["--flavor=browser"],
+ "browser-chrome-gpu": ["--flavor=browser", "--subsuite=gpu"],
+ "browser-chrome-clipboard": ["--flavor=browser", "--subsuite=clipboard"],
+ "browser-chrome-chunked": ["--flavor=browser", "--chunk-by-runtime"],
+ "browser-chrome-addons": ["--flavor=browser", "--chunk-by-runtime", "--tag=addons"],
+ "browser-chrome-screenshots": ["--flavor=browser", "--subsuite=screenshots"],
+ "mochitest-gl": ["--subsuite=webgl"],
+ "mochitest-devtools-chrome": ["--flavor=browser", "--subsuite=devtools"],
+ "mochitest-devtools-chrome-chunked": ["--flavor=browser", "--subsuite=devtools", "--chunk-by-runtime"],
+ "mochitest-metro-chrome": ["--flavor=browser", "--metro-immersive"],
+ "jetpack-package": ["--flavor=jetpack-package"],
+ "jetpack-package-clipboard": ["--flavor=jetpack-package", "--subsuite=clipboard"],
+ "jetpack-addon": ["--flavor=jetpack-addon"],
+ "a11y": ["--flavor=a11y"],
+ },
+ # local reftest suites
+ "all_reftest_suites": {
+ "crashtest": {
+ 'options': ["--suite=crashtest"],
+ 'tests': ["tests/reftest/tests/testing/crashtest/crashtests.list"]
+ },
+ "jsreftest": {
+ 'options':["--extra-profile-file=tests/jsreftest/tests/user.js"],
+ 'tests': ["tests/jsreftest/tests/jstests.list"]
+ },
+ "reftest": {
+ 'options': ["--suite=reftest"],
+ 'tests': ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ "reftest-gpu": {
+ 'options': ["--suite=reftest",
+ "--setpref=layers.gpu-process.force-enabled=true"],
+ 'tests': ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ "reftest-no-accel": {
+ "options": ["--suite=reftest",
+ "--setpref=gfx.direct2d.disabled=true",
+ "--setpref=layers.acceleration.disabled=true"],
+ "tests": ["tests/reftest/tests/layout/reftests/reftest.list"]
+ },
+ },
+ "all_xpcshell_suites": {
+ "xpcshell": {
+ 'options': ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ 'tests': []
+ },
+ "xpcshell-addons": {
+ 'options': ["--xpcshell=%(abs_app_dir)s/" + XPCSHELL_NAME,
+ "--tag=addons",
+ "--manifest=tests/xpcshell/tests/xpcshell.ini"],
+ 'tests': []
+ },
+ },
+ "all_cppunittest_suites": {
+ "cppunittest": ['tests/cppunittest']
+ },
+ "all_gtest_suites": {
+ "gtest": []
+ },
+ "all_jittest_suites": {
+ "jittest": []
+ },
+ "all_mozbase_suites": {
+ "mozbase": []
+ },
+ "run_cmd_checks_enabled": True,
+ "preflight_run_cmd_suites": [
+ # NOTE 'enabled' is only here while we have unconsolidated configs
+ {
+ "name": "disable_screen_saver",
+ "cmd": ["xset", "s", "off", "s", "reset"],
+ "architectures": ["32bit", "64bit"],
+ "halt_on_failure": False,
+ "enabled": DISABLE_SCREEN_SAVER
+ },
+ {
+ "name": "run mouse & screen adjustment script",
+ "cmd": [
+ # When configs are consolidated, this Python path will only be
+ # needed on Windows.
+ sys.executable,
+ "../scripts/external_tools/mouse_and_screen_resolution.py",
+ "--configuration-file",
+ "../scripts/external_tools/machine-configuration.json"],
+ "architectures": ["32bit"],
+ "halt_on_failure": True,
+ "enabled": ADJUST_MOUSE_AND_SCREEN
+ },
+ ],
+ "vcs_output_timeout": 1000,
+ "minidump_save_path": "%(abs_work_dir)s/../minidumps",
+ "buildbot_max_log_size": 52428800,
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+ "unstructured_flavors": {"mochitest": ['jetpack'],
+ "xpcshell": [],
+ "gtest": [],
+ "mozmill": [],
+ "cppunittest": [],
+ "jittest": [],
+ "mozbase": [],
+ },
+ "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+ "download_minidump_stackwalk": True,
+ "minidump_stackwalk_path": "win32-minidump_stackwalk.exe",
+ "minidump_tooltool_manifest_path": "config/tooltool-manifests/win32/releng.manifest",
+ "download_nodejs": True,
+ "nodejs_path": "node-win32.exe",
+ "nodejs_tooltool_manifest_path": "config/tooltool-manifests/win32/nodejs.manifest",
+}
diff --git a/testing/mozharness/configs/users/aki/gaia_json.py b/testing/mozharness/configs/users/aki/gaia_json.py
new file mode 100644
index 000000000..4263cc908
--- /dev/null
+++ b/testing/mozharness/configs/users/aki/gaia_json.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+config = {
+ "log_name": "gaia_bump",
+ "log_max_rotate": 99,
+ "ssh_key": "~/.ssh/id_rsa",
+ "ssh_user": "asasaki@mozilla.com",
+ "hg_user": "Test Pusher <aki@escapewindow.com>",
+ "revision_file": "b2g/config/gaia.json",
+ "exes": {
+ # Work around https certificate warnings
+ "hg": ['hg', "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"],
+ },
+ "repo_list": [{
+ "polling_url": "https://hg.mozilla.org/integration/gaia-central/json-pushes?full=1",
+ "branch": "default",
+ "repo_url": "https://hg.mozilla.org/integration/gaia-central",
+ "repo_name": "gaia-central",
+ "target_push_url": "ssh://hg.mozilla.org/users/asasaki_mozilla.com/birch",
+ "target_pull_url": "https://hg.mozilla.org/users/asasaki_mozilla.com/birch",
+ "target_tag": "default",
+ "target_repo_name": "birch",
+ }, {
+ "polling_url": "https://hg.mozilla.org/integration/gaia-1_2/json-pushes?full=1",
+ "branch": "default",
+ "repo_url": "https://hg.mozilla.org/integration/gaia-1_2",
+ "repo_name": "gaia-1_2",
+ "target_push_url": "ssh://hg.mozilla.org/users/asasaki_mozilla.com/mozilla-aurora",
+ "target_pull_url": "https://hg.mozilla.org/users/asasaki_mozilla.com/mozilla-aurora",
+ "target_tag": "default",
+ "target_repo_name": "mozilla-aurora",
+ }],
+}
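
Each `repo_list` entry pairs an hg json-pushes feed (`polling_url`) with a target repo that receives the bumped b2g/config/gaia.json. The per-entry flow, inferred here from the config keys rather than from bump_gaia_json.py itself, is roughly: poll for the newest push, record its tip changeset in `revision_file`, then commit and push that change to `target_push_url`. A sketch of the polling half (Python 2, matching the era of these configs):

import json
import urllib2  # Python 2, as used by these configs

def latest_gaia_revision(entry):
    # json-pushes?full=1 returns {push_id: {"changesets": [{"node": ...}, ...]}}
    pushes = json.load(urllib2.urlopen(entry["polling_url"]))
    newest_push = max(pushes, key=int)  # push ids are numeric strings
    return pushes[newest_push]["changesets"][-1]["node"]
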
diff --git a/testing/mozharness/configs/users/sfink/mock.py b/testing/mozharness/configs/users/sfink/mock.py
new file mode 100644
index 000000000..07b5c5c43
--- /dev/null
+++ b/testing/mozharness/configs/users/sfink/mock.py
@@ -0,0 +1,3 @@
+config = {
+ "mock_target": "mozilla-centos6-x86_64",
+}
diff --git a/testing/mozharness/configs/users/sfink/spidermonkey.py b/testing/mozharness/configs/users/sfink/spidermonkey.py
new file mode 100644
index 000000000..efbc9a805
--- /dev/null
+++ b/testing/mozharness/configs/users/sfink/spidermonkey.py
@@ -0,0 +1,38 @@
+# This config file is for locally testing spidermonkey_build.py. It supplies
+# the values that buildbot would otherwise provide.
+
+BRANCH = "local-src"
+HOME = "/home/sfink"
+REPO = HOME + "/src/MI-GC"
+
+config = {
+ "hgurl": "https://hg.mozilla.org/",
+ "python": "python",
+ "sixgill": HOME + "/src/sixgill",
+ "sixgill_bin": HOME + "/src/sixgill/bin",
+
+ "repo": REPO,
+ "repos": [{
+ "repo": REPO,
+ "branch": "default",
+ "dest": BRANCH,
+ }, {
+ "repo": "https://hg.mozilla.org/build/tools",
+ "branch": "default",
+ "dest": "tools"
+ }],
+
+ "tools_dir": "/tools",
+
+ "mock_target": "mozilla-centos6-x86_64",
+
+ "upload_remote_basepath": "/tmp/upload-base",
+ "upload_ssh_server": "localhost",
+ "upload_ssh_key": "/home/sfink/.ssh/id_rsa",
+ "upload_ssh_user": "sfink",
+ "upload_label": "linux64-br-haz",
+
+ # For testing tryserver uploads (directory structure is different)
+ #"branch": "try",
+ #"revision": "deadbeef1234",
+}
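
With the mock_target set here (matching mock.py above), a local run would look something like `python scripts/spidermonkey_build.py --config-file users/sfink/spidermonkey.py`; the flag spelling follows mozharness's --config-file/--cfg convention and is shown here for illustration only.
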
diff --git a/testing/mozharness/configs/web_platform_tests/prod_config.py b/testing/mozharness/configs/web_platform_tests/prod_config.py
new file mode 100644
index 000000000..f0fb0b074
--- /dev/null
+++ b/testing/mozharness/configs/web_platform_tests/prod_config.py
@@ -0,0 +1,47 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import os
+
+config = {
+ "options": [
+ "--prefs-root=%(test_path)s/prefs",
+ "--processes=1",
+ "--config=%(test_path)s/wptrunner.ini",
+ "--ca-cert-path=%(test_path)s/certs/cacert.pem",
+ "--host-key-path=%(test_path)s/certs/web-platform.test.key",
+ "--host-cert-path=%(test_path)s/certs/web-platform.test.pem",
+ "--certutil-binary=%(test_install_path)s/bin/certutil",
+ ],
+
+ "exes": {
+ 'python': '/tools/buildbot/bin/python',
+ 'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
+ 'tooltool.py': "/tools/tooltool.py",
+ },
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+
+ "pip_index": False,
+
+ "buildbot_json_path": "buildprops.json",
+
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+
+ "download_minidump_stackwalk": True,
+
+ "download_tooltool": True,
+
+ "tooltool_cache": "/builds/tooltool_cache",
+
+}
+
diff --git a/testing/mozharness/configs/web_platform_tests/prod_config_windows.py b/testing/mozharness/configs/web_platform_tests/prod_config_windows.py
new file mode 100644
index 000000000..7c0f525fe
--- /dev/null
+++ b/testing/mozharness/configs/web_platform_tests/prod_config_windows.py
@@ -0,0 +1,48 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+# This is a template config file for running web-platform-tests.
+
+import os
+import sys
+
+config = {
+ "options": [
+ "--prefs-root=%(test_path)s/prefs",
+ "--processes=1",
+ "--config=%(test_path)s/wptrunner.ini",
+ "--ca-cert-path=%(test_path)s/certs/cacert.pem",
+ "--host-key-path=%(test_path)s/certs/web-platform.test.key",
+ "--host-cert-path=%(test_path)s/certs/web-platform.test.pem",
+ "--certutil-binary=%(test_install_path)s/bin/certutil",
+ ],
+
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [sys.executable, 'c:/mozilla-build/buildbotve/virtualenv.py'],
+ 'hg': 'c:/mozilla-build/hg/hg',
+ 'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
+ '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
+ 'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
+ },
+
+ "find_links": [
+ "http://pypi.pvt.build.mozilla.org/pub",
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+
+ "pip_index": False,
+
+ "buildbot_json_path": "buildprops.json",
+
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+
+ "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
+
+ "download_minidump_stackwalk": True,
+}
diff --git a/testing/mozharness/configs/web_platform_tests/prod_config_windows_taskcluster.py b/testing/mozharness/configs/web_platform_tests/prod_config_windows_taskcluster.py
new file mode 100644
index 000000000..845c66f76
--- /dev/null
+++ b/testing/mozharness/configs/web_platform_tests/prod_config_windows_taskcluster.py
@@ -0,0 +1,48 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+# This is a template config file for running web-platform-tests.
+
+import os
+import sys
+
+config = {
+ "options": [
+ "--prefs-root=%(test_path)s/prefs",
+ "--processes=1",
+ "--config=%(test_path)s/wptrunner.ini",
+ "--ca-cert-path=%(test_path)s/certs/cacert.pem",
+ "--host-key-path=%(test_path)s/certs/web-platform.test.key",
+ "--host-cert-path=%(test_path)s/certs/web-platform.test.pem",
+ "--certutil-binary=%(test_install_path)s/bin/certutil",
+ ],
+
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [
+ sys.executable,
+ os.path.join(os.path.dirname(sys.executable), 'Lib', 'site-packages', 'virtualenv.py')
+ ],
+ 'mozinstall': ['build/venv/scripts/python', 'build/venv/scripts/mozinstall-script.py'],
+ 'tooltool.py': [sys.executable, os.path.join(os.environ['MOZILLABUILD'], 'tooltool.py')],
+ 'hg': os.path.join(os.environ['PROGRAMFILES'], 'Mercurial', 'hg')
+ },
+
+ "proxxy": {},
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+
+ "pip_index": False,
+
+ "default_blob_upload_servers": [
+ "https://blobupload.elasticbeanstalk.com",
+ ],
+
+ "blob_uploader_auth_file" : 'C:/builds/oauth.txt',
+
+ "download_minidump_stackwalk": True,
+}
diff --git a/testing/mozharness/configs/web_platform_tests/test_config.py b/testing/mozharness/configs/web_platform_tests/test_config.py
new file mode 100644
index 000000000..29dd8014b
--- /dev/null
+++ b/testing/mozharness/configs/web_platform_tests/test_config.py
@@ -0,0 +1,32 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+config = {
+ "options": [
+ "--prefs-root=%(test_path)s/prefs",
+ "--processes=1",
+ "--config=%(test_path)s/wptrunner.ini",
+ "--ca-cert-path=%(test_path)s/certs/cacert.pem",
+ "--host-key-path=%(test_path)s/certs/web-platform.test.key",
+ "--host-cert-path=%(test_path)s/certs/web-platform.test.pem",
+ "--certutil-binary=%(test_install_path)s/bin/certutil",
+ ],
+
+ "default_actions": [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'pull',
+ 'install',
+ 'run-tests',
+ ],
+
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+
+ "pip_index": False,
+}
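
`default_actions` lists the mozharness actions to run, in order; each dashed name corresponds to a method on the script with dashes mapped to underscores. A minimal dispatch sketch, illustrative only and not BaseScript's implementation (which also handles preflight/postflight hooks):

class FakeScript(object):
    """Stand-in for a mozharness script, for illustration."""
    def clobber(self):
        print("clobbering the work dir")

    def download_and_extract(self):
        print("fetching and unpacking the test package")

def run_actions(script, actions):
    for action in actions:
        # "download-and-extract" -> download_and_extract()
        getattr(script, action.replace("-", "_"))()

run_actions(FakeScript(), ["clobber", "download-and-extract"])
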
diff --git a/testing/mozharness/configs/web_platform_tests/test_config_windows.py b/testing/mozharness/configs/web_platform_tests/test_config_windows.py
new file mode 100644
index 000000000..d83c136ea
--- /dev/null
+++ b/testing/mozharness/configs/web_platform_tests/test_config_windows.py
@@ -0,0 +1,43 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+config = {
+ "options": [
+ "--prefs-root=%(test_path)s/prefs",
+ "--processes=1",
+ "--config=%(test_path)s/wptrunner.ini",
+ "--ca-cert-path=%(test_path)s/certs/cacert.pem",
+ "--host-key-path=%(test_path)s/certs/web-platform.test.key",
+ "--host-cert-path=%(test_path)s/certs/web-platform.test.pem",
+ "--certutil-binary=%(test_install_path)s/bin/certutil",
+ ],
+
+ "exes": {
+ 'python': sys.executable,
+ 'virtualenv': [sys.executable, 'c:/mozilla-source/cedar/python/virtualenv/virtualenv.py'],  # previously: 'c:/mozilla-build/buildbotve/virtualenv.py'
+ 'hg': 'c:/mozilla-build/hg/hg',
+ 'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
+ '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
+ },
+
+ "default_actions": [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'pull',
+ 'install',
+ 'run-tests',
+ ],
+
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+
+ "pip_index": False,
+}
diff --git a/testing/mozharness/docs/Makefile b/testing/mozharness/docs/Makefile
new file mode 100644
index 000000000..980ffbd3b
--- /dev/null
+++ b/testing/mozharness/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MozHarness.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MozHarness.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/MozHarness"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MozHarness"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/testing/mozharness/docs/android_emulator_build.rst b/testing/mozharness/docs/android_emulator_build.rst
new file mode 100644
index 000000000..4087c64d4
--- /dev/null
+++ b/testing/mozharness/docs/android_emulator_build.rst
@@ -0,0 +1,7 @@
+android_emulator_build module
+=============================
+
+.. automodule:: android_emulator_build
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/android_emulator_unittest.rst b/testing/mozharness/docs/android_emulator_unittest.rst
new file mode 100644
index 000000000..7a8c42c50
--- /dev/null
+++ b/testing/mozharness/docs/android_emulator_unittest.rst
@@ -0,0 +1,7 @@
+android_emulator_unittest module
+================================
+
+.. automodule:: android_emulator_unittest
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/bouncer_submitter.rst b/testing/mozharness/docs/bouncer_submitter.rst
new file mode 100644
index 000000000..5b71caca7
--- /dev/null
+++ b/testing/mozharness/docs/bouncer_submitter.rst
@@ -0,0 +1,8 @@
+bouncer_submitter module
+========================
+
+.. automodule:: bouncer_submitter
+ :members:
+ :undoc-members:
+ :private-members:
+ :special-members:
diff --git a/testing/mozharness/docs/bump_gaia_json.rst b/testing/mozharness/docs/bump_gaia_json.rst
new file mode 100644
index 000000000..81b84d3a9
--- /dev/null
+++ b/testing/mozharness/docs/bump_gaia_json.rst
@@ -0,0 +1,7 @@
+bump_gaia_json module
+=====================
+
+.. automodule:: bump_gaia_json
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/conf.py b/testing/mozharness/docs/conf.py
new file mode 100644
index 000000000..e18c868a0
--- /dev/null
+++ b/testing/mozharness/docs/conf.py
@@ -0,0 +1,268 @@
+# -*- coding: utf-8 -*-
+#
+# Moz Harness documentation build configuration file, created by
+# sphinx-quickstart on Mon Apr 14 17:35:24 2014.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath('../scripts'))
+sys.path.insert(0, os.path.abspath('../mozharness'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.viewcode',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Moz Harness'
+copyright = u'2014, aki and a cast of tens!'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.1'
+# The full version, including alpha/beta/rc tags.
+release = '0.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'MozHarnessdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ ('index', 'MozHarness.tex', u'Moz Harness Documentation',
+ u'aki and a cast of tens!', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'mozharness', u'Moz Harness Documentation',
+ [u'aki and a cast of tens!'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'MozHarness', u'Moz Harness Documentation',
+ u'aki and a cast of tens!', 'MozHarness', 'Configuration-driven Mozilla build and test automation harness.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
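+
+# A minimal build recipe (a sketch, not generated by sphinx-quickstart): with
+# Sphinx installed, the HTML docs can be produced from this directory via
+#   sphinx-build -b html . _build/html
+# Note the automodule directives in the .rst files require the mozharness
+# package and scripts to be importable for autodoc.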
diff --git a/testing/mozharness/docs/configtest.rst b/testing/mozharness/docs/configtest.rst
new file mode 100644
index 000000000..10e4a56c9
--- /dev/null
+++ b/testing/mozharness/docs/configtest.rst
@@ -0,0 +1,7 @@
+configtest module
+=================
+
+.. automodule:: configtest
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/desktop_l10n.rst b/testing/mozharness/docs/desktop_l10n.rst
new file mode 100644
index 000000000..b94dadedc
--- /dev/null
+++ b/testing/mozharness/docs/desktop_l10n.rst
@@ -0,0 +1,7 @@
+desktop_l10n module
+===================
+
+.. automodule:: desktop_l10n
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/desktop_unittest.rst b/testing/mozharness/docs/desktop_unittest.rst
new file mode 100644
index 000000000..f70e8d8d9
--- /dev/null
+++ b/testing/mozharness/docs/desktop_unittest.rst
@@ -0,0 +1,7 @@
+desktop_unittest module
+=======================
+
+.. automodule:: desktop_unittest
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/fx_desktop_build.rst b/testing/mozharness/docs/fx_desktop_build.rst
new file mode 100644
index 000000000..b5d6ac21c
--- /dev/null
+++ b/testing/mozharness/docs/fx_desktop_build.rst
@@ -0,0 +1,7 @@
+fx_desktop_build module
+=======================
+
+.. automodule:: fx_desktop_build
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/gaia_build_integration.rst b/testing/mozharness/docs/gaia_build_integration.rst
new file mode 100644
index 000000000..a2c15204c
--- /dev/null
+++ b/testing/mozharness/docs/gaia_build_integration.rst
@@ -0,0 +1,7 @@
+gaia_build_integration module
+=============================
+
+.. automodule:: gaia_build_integration
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/gaia_integration.rst b/testing/mozharness/docs/gaia_integration.rst
new file mode 100644
index 000000000..da143919a
--- /dev/null
+++ b/testing/mozharness/docs/gaia_integration.rst
@@ -0,0 +1,7 @@
+gaia_integration module
+=======================
+
+.. automodule:: gaia_integration
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/gaia_unit.rst b/testing/mozharness/docs/gaia_unit.rst
new file mode 100644
index 000000000..9212b288c
--- /dev/null
+++ b/testing/mozharness/docs/gaia_unit.rst
@@ -0,0 +1,7 @@
+gaia_unit module
+================
+
+.. automodule:: gaia_unit
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/index.rst b/testing/mozharness/docs/index.rst
new file mode 100644
index 000000000..e2c05d34a
--- /dev/null
+++ b/testing/mozharness/docs/index.rst
@@ -0,0 +1,24 @@
+.. Moz Harness documentation master file, created by
+ sphinx-quickstart on Mon Apr 14 17:35:24 2014.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to Moz Harness's documentation!
+=======================================
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ modules.rst
+ scripts.rst
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/testing/mozharness/docs/marionette.rst b/testing/mozharness/docs/marionette.rst
new file mode 100644
index 000000000..28763406b
--- /dev/null
+++ b/testing/mozharness/docs/marionette.rst
@@ -0,0 +1,7 @@
+marionette module
+=================
+
+.. automodule:: marionette
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mobile_l10n.rst b/testing/mozharness/docs/mobile_l10n.rst
new file mode 100644
index 000000000..ed53d09d3
--- /dev/null
+++ b/testing/mozharness/docs/mobile_l10n.rst
@@ -0,0 +1,7 @@
+mobile_l10n module
+==================
+
+.. automodule:: mobile_l10n
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mobile_partner_repack.rst b/testing/mozharness/docs/mobile_partner_repack.rst
new file mode 100644
index 000000000..f8be0bef8
--- /dev/null
+++ b/testing/mozharness/docs/mobile_partner_repack.rst
@@ -0,0 +1,7 @@
+mobile_partner_repack module
+============================
+
+.. automodule:: mobile_partner_repack
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/modules.rst b/testing/mozharness/docs/modules.rst
new file mode 100644
index 000000000..73652563b
--- /dev/null
+++ b/testing/mozharness/docs/modules.rst
@@ -0,0 +1,13 @@
+mozharness
+==========
+
+.. toctree::
+ :maxdepth: 4
+
+ mozharness
+ mozharness.base.rst
+ mozharness.base.vcs.rst
+ mozharness.mozilla.building.rst
+ mozharness.mozilla.l10n.rst
+ mozharness.mozilla.rst
+ mozharness.mozilla.testing.rst
diff --git a/testing/mozharness/docs/mozharness.base.rst b/testing/mozharness/docs/mozharness.base.rst
new file mode 100644
index 000000000..923e5658d
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.base.rst
@@ -0,0 +1,101 @@
+mozharness.base package
+=======================
+
+Subpackages
+-----------
+
+.. toctree::
+
+ mozharness.base.vcs
+
+Submodules
+----------
+
+mozharness.base.config module
+-----------------------------
+
+.. automodule:: mozharness.base.config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.errors module
+-----------------------------
+
+.. automodule:: mozharness.base.errors
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.gaia_test module
+--------------------------------
+
+.. automodule:: mozharness.base.gaia_test
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.log module
+--------------------------
+
+.. automodule:: mozharness.base.log
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.mar module
+--------------------------
+
+.. automodule:: mozharness.base.mar
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.parallel module
+-------------------------------
+
+.. automodule:: mozharness.base.parallel
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.python module
+-----------------------------
+
+.. automodule:: mozharness.base.python
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.script module
+-----------------------------
+
+.. automodule:: mozharness.base.script
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.signing module
+------------------------------
+
+.. automodule:: mozharness.base.signing
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.transfer module
+-------------------------------
+
+.. automodule:: mozharness.base.transfer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: mozharness.base
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mozharness.base.vcs.rst b/testing/mozharness/docs/mozharness.base.vcs.rst
new file mode 100644
index 000000000..f262b3f7a
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.base.vcs.rst
@@ -0,0 +1,46 @@
+mozharness.base.vcs package
+===========================
+
+Submodules
+----------
+
+mozharness.base.vcs.gittool module
+----------------------------------
+
+.. automodule:: mozharness.base.vcs.gittool
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.vcs.mercurial module
+------------------------------------
+
+.. automodule:: mozharness.base.vcs.mercurial
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.vcs.vcsbase module
+----------------------------------
+
+.. automodule:: mozharness.base.vcs.vcsbase
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.base.vcs.vcssync module
+----------------------------------
+
+.. automodule:: mozharness.base.vcs.vcssync
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: mozharness.base.vcs
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mozharness.mozilla.building.rst b/testing/mozharness/docs/mozharness.mozilla.building.rst
new file mode 100644
index 000000000..b8b6106c2
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.mozilla.building.rst
@@ -0,0 +1,22 @@
+mozharness.mozilla.building package
+===================================
+
+Submodules
+----------
+
+mozharness.mozilla.building.buildbase module
+--------------------------------------------
+
+.. automodule:: mozharness.mozilla.building.buildbase
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: mozharness.mozilla.building
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mozharness.mozilla.l10n.rst b/testing/mozharness/docs/mozharness.mozilla.l10n.rst
new file mode 100644
index 000000000..6951ec1a7
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.mozilla.l10n.rst
@@ -0,0 +1,30 @@
+mozharness.mozilla.l10n package
+===============================
+
+Submodules
+----------
+
+mozharness.mozilla.l10n.locales module
+--------------------------------------
+
+.. automodule:: mozharness.mozilla.l10n.locales
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.l10n.multi_locale_build module
+-------------------------------------------------
+
+.. automodule:: mozharness.mozilla.l10n.multi_locale_build
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: mozharness.mozilla.l10n
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mozharness.mozilla.rst b/testing/mozharness/docs/mozharness.mozilla.rst
new file mode 100644
index 000000000..2a869db7b
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.mozilla.rst
@@ -0,0 +1,111 @@
+mozharness.mozilla package
+==========================
+
+Subpackages
+-----------
+
+.. toctree::
+
+ mozharness.mozilla.building
+ mozharness.mozilla.l10n
+ mozharness.mozilla.testing
+
+Submodules
+----------
+
+mozharness.mozilla.blob_upload module
+-------------------------------------
+
+.. automodule:: mozharness.mozilla.blob_upload
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.buildbot module
+----------------------------------
+
+.. automodule:: mozharness.mozilla.buildbot
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.gaia module
+------------------------------
+
+.. automodule:: mozharness.mozilla.gaia
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.mapper module
+--------------------------------
+
+.. automodule:: mozharness.mozilla.mapper
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.mock module
+------------------------------
+
+.. automodule:: mozharness.mozilla.mock
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.mozbase module
+---------------------------------
+
+.. automodule:: mozharness.mozilla.mozbase
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.purge module
+-------------------------------
+
+.. automodule:: mozharness.mozilla.purge
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.release module
+---------------------------------
+
+.. automodule:: mozharness.mozilla.release
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.repo_manifest module
+---------------------------------------
+
+.. automodule:: mozharness.mozilla.repo_manifest
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.signing module
+---------------------------------
+
+.. automodule:: mozharness.mozilla.signing
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.tooltool module
+----------------------------------
+
+.. automodule:: mozharness.mozilla.tooltool
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: mozharness.mozilla
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mozharness.mozilla.testing.rst b/testing/mozharness/docs/mozharness.mozilla.testing.rst
new file mode 100644
index 000000000..ccb57a3dd
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.mozilla.testing.rst
@@ -0,0 +1,62 @@
+mozharness.mozilla.testing package
+==================================
+
+Submodules
+----------
+
+mozharness.mozilla.testing.device module
+----------------------------------------
+
+.. automodule:: mozharness.mozilla.testing.device
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.testing.errors module
+----------------------------------------
+
+.. automodule:: mozharness.mozilla.testing.errors
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.testing.mozpool module
+-----------------------------------------
+
+.. automodule:: mozharness.mozilla.testing.mozpool
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.testing.talos module
+---------------------------------------
+
+.. automodule:: mozharness.mozilla.testing.talos
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.testing.testbase module
+------------------------------------------
+
+.. automodule:: mozharness.mozilla.testing.testbase
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+mozharness.mozilla.testing.unittest module
+------------------------------------------
+
+.. automodule:: mozharness.mozilla.testing.unittest
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Module contents
+---------------
+
+.. automodule:: mozharness.mozilla.testing
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/mozharness.rst b/testing/mozharness/docs/mozharness.rst
new file mode 100644
index 000000000..f14e6b91e
--- /dev/null
+++ b/testing/mozharness/docs/mozharness.rst
@@ -0,0 +1,18 @@
+mozharness package
+==================
+
+Subpackages
+-----------
+
+.. toctree::
+
+ mozharness.base
+ mozharness.mozilla
+
+Module contents
+---------------
+
+.. automodule:: mozharness
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/multil10n.rst b/testing/mozharness/docs/multil10n.rst
new file mode 100644
index 000000000..b14e62b78
--- /dev/null
+++ b/testing/mozharness/docs/multil10n.rst
@@ -0,0 +1,7 @@
+multil10n module
+================
+
+.. automodule:: multil10n
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/scripts.rst b/testing/mozharness/docs/scripts.rst
new file mode 100644
index 000000000..b5258457e
--- /dev/null
+++ b/testing/mozharness/docs/scripts.rst
@@ -0,0 +1,22 @@
+scripts
+=======
+
+.. toctree::
+ android_emulator_build.rst
+ android_emulator_unittest.rst
+ bouncer_submitter.rst
+ bump_gaia_json.rst
+ configtest.rst
+ desktop_l10n.rst
+ desktop_unittest.rst
+ fx_desktop_build.rst
+ gaia_build_integration.rst
+ gaia_integration.rst
+ gaia_unit.rst
+ marionette.rst
+ mobile_l10n.rst
+ mobile_partner_repack.rst
+ multil10n.rst
+ spidermonkey_build.rst
+ talos_script.rst
+ web_platform_tests.rst
diff --git a/testing/mozharness/docs/spidermonkey_build.rst b/testing/mozharness/docs/spidermonkey_build.rst
new file mode 100644
index 000000000..7e73c672e
--- /dev/null
+++ b/testing/mozharness/docs/spidermonkey_build.rst
@@ -0,0 +1,7 @@
+spidermonkey_build module
+=========================
+
+.. automodule:: spidermonkey_build
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/talos_script.rst b/testing/mozharness/docs/talos_script.rst
new file mode 100644
index 000000000..509aac400
--- /dev/null
+++ b/testing/mozharness/docs/talos_script.rst
@@ -0,0 +1,7 @@
+talos_script module
+===================
+
+.. automodule:: talos_script
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/docs/web_platform_tests.rst b/testing/mozharness/docs/web_platform_tests.rst
new file mode 100644
index 000000000..6a2887aa8
--- /dev/null
+++ b/testing/mozharness/docs/web_platform_tests.rst
@@ -0,0 +1,7 @@
+web_platform_tests module
+=========================
+
+.. automodule:: web_platform_tests
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/testing/mozharness/examples/action_config_script.py b/testing/mozharness/examples/action_config_script.py
new file mode 100755
index 000000000..e1135771e
--- /dev/null
+++ b/testing/mozharness/examples/action_config_script.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python -u
+"""action_config_script.py
+
+Demonstrate actions and config.
+"""
+
+import os
+import sys
+import time
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+
+
+# ActionsConfigExample {{{1
+class ActionsConfigExample(BaseScript):
+ config_options = [[
+ ['--beverage', ],
+ {"action": "store",
+ "dest": "beverage",
+ "type": "string",
+ "help": "Specify your beverage of choice",
+ }
+ ], [
+ ['--ship-style', ],
+ {"action": "store",
+ "dest": "ship_style",
+ "type": "choice",
+ "choices": ["1", "2", "3"],
+ "help": "Specify the type of ship",
+ }
+ ], [
+ ['--long-sleep-time', ],
+ {"action": "store",
+ "dest": "long_sleep_time",
+ "type": "int",
+ "help": "Specify how long to sleep",
+ }
+ ]]
+
+ def __init__(self, require_config_file=False):
+ super(ActionsConfigExample, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'nap',
+ 'ship-it',
+ ],
+ default_actions=[
+ 'clobber',
+ 'nap',
+ 'ship-it',
+ ],
+ require_config_file=require_config_file,
+ config={
+ 'beverage': "kool-aid",
+ 'long_sleep_time': 3600,
+ 'ship_style': "1",
+ }
+ )
+
+ def _sleep(self, sleep_length, interval=5):
+ self.info("Sleeping %d seconds..." % sleep_length)
+ counter = 0
+ while counter + interval <= sleep_length:
+ sys.stdout.write(".")
+ try:
+ time.sleep(interval)
+ except KeyboardInterrupt:
+ print
+ self.error("Impatient, are we?")
+ sys.exit(1)
+ counter += interval
+ print
+ self.info("Ok, done.")
+
+ def _ship1(self):
+ self.info("""
+ _~
+ _~ )_)_~
+ )_))_))_)
+ _!__!__!_
+ \______t/
+~~~~~~~~~~~~~
+""")
+
+ def _ship2(self):
+ self.info("""
+ _4 _4
+ _)_))_)
+ _)_)_)_)
+ _)_))_))_)_
+ \_=__=__=_/
+~~~~~~~~~~~~~
+""")
+
+ def _ship3(self):
+ self.info("""
+ ,;;:;,
+ ;;;;;
+ ,:;;:; ,'=.
+ ;:;:;' .=" ,'_\\
+ ':;:;,/ ,__:=@
+ ';;:; =./)_
+ `"=\\_ )_"`
+ ``'"`
+""")
+
+ def nap(self):
+ for var_name in self.config.keys():
+ if var_name.startswith("random_config_key"):
+ self.info("This is going to be %s!" % self.config[var_name])
+ sleep_time = self.config['long_sleep_time']
+ if sleep_time > 60:
+ self.info("Ok, grab a %s. This is going to take a while." % self.config['beverage'])
+ else:
+ self.info("This will be quick, but grab a %s anyway." % self.config['beverage'])
+ self._sleep(self.config['long_sleep_time'])
+
+ def ship_it(self):
+ name = "_ship%s" % self.config['ship_style']
+ if hasattr(self, name):
+ getattr(self, name)()
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ actions_config_example = ActionsConfigExample()
+ actions_config_example.run_and_exit()
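+
+# Example invocations (a sketch; the flags are the config_options defined
+# above, and the values are illustrative):
+#   python examples/action_config_script.py --beverage coffee --ship-style 3
+#   python examples/action_config_script.py --long-sleep-time 10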
diff --git a/testing/mozharness/examples/silent_script.py b/testing/mozharness/examples/silent_script.py
new file mode 100755
index 000000000..c73298ed7
--- /dev/null
+++ b/testing/mozharness/examples/silent_script.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+""" This script is an example of why I care so much about Mozharness' 2nd core
+concept, logging. http://escapewindow.dreamwidth.org/230853.html
+"""
+
+import os
+import shutil
+
+#print "downloading foo.tar.bz2..."
+os.system("curl -s -o foo.tar.bz2 http://people.mozilla.org/~asasaki/foo.tar.bz2")
+#os.system("curl -v -o foo.tar.bz2 http://people.mozilla.org/~asasaki/foo.tar.bz2")
+
+#os.rename("foo.tar.bz2", "foo3.tar.bz2")
+os.system("tar xjf foo.tar.bz2")
+
+#os.chdir("x")
+os.remove("x/ship2")
+os.remove("foo.tar.bz2")
+os.system("tar cjf foo.tar.bz2 x")
+shutil.rmtree("x")
+#os.system("scp -q foo.tar.bz2 people.mozilla.org:public_html/foo2.tar.bz2")
+os.remove("foo.tar.bz2")
diff --git a/testing/mozharness/examples/venv.py b/testing/mozharness/examples/venv.py
new file mode 100755
index 000000000..6b3c88f96
--- /dev/null
+++ b/testing/mozharness/examples/venv.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""venv.py
+
+Test virtualenv creation. This installs talos in the local venv; that's it.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import PythonErrorList
+from mozharness.base.python import virtualenv_config_options, VirtualenvMixin
+from mozharness.base.script import BaseScript
+
+# VirtualenvExample {{{1
+class VirtualenvExample(VirtualenvMixin, BaseScript):
+ config_options = [[
+ ["--talos-url"],
+ {"action": "store",
+ "dest": "talos_url",
+ "default": "https://hg.mozilla.org/build/talos/archive/tip.tar.gz",
+ "help": "Specify the talos pip url"
+ }
+ ]] + virtualenv_config_options
+
+ def __init__(self, require_config_file=False):
+ super(VirtualenvExample, self).__init__(
+ config_options=self.config_options,
+ all_actions=['create-virtualenv',
+ ],
+ default_actions=['create-virtualenv',
+ ],
+ require_config_file=require_config_file,
+ config={"virtualenv_modules": ["talos"]},
+ )
+
+# __main__ {{{1
+if __name__ == '__main__':
+ venv_example = VirtualenvExample()
+ venv_example.run_and_exit()
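+
+# Example invocation (a sketch; if --talos-url is omitted, the default
+# defined in config_options above is used):
+#   python examples/venv.py --talos-url https://hg.mozilla.org/build/talos/archive/tip.tar.gz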
diff --git a/testing/mozharness/examples/verbose_script.py b/testing/mozharness/examples/verbose_script.py
new file mode 100755
index 000000000..e8afd7567
--- /dev/null
+++ b/testing/mozharness/examples/verbose_script.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""verbose_script.py
+
+Contrast to silent_script.py.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+#from mozharness.base.errors import TarErrorList, SSHErrorList
+from mozharness.base.script import BaseScript
+
+
+# VerboseExample {{{1
+class VerboseExample(BaseScript):
+ def __init__(self, require_config_file=False):
+ super(VerboseExample, self).__init__(
+ all_actions=['verbosity', ],
+ require_config_file=require_config_file,
+ config={"tarball_name": "bar.tar.bz2"}
+ )
+
+ def verbosity(self):
+ tarball_name = self.config["tarball_name"]
+ self.download_file(
+ "http://people.mozilla.org/~asasaki/foo.tar.bz2",
+ file_name=tarball_name
+ )
+ # the error_list adds more error checking.
+ # the halt_on_failure will kill the script at this point if
+ # unsuccessful. Be aware if you need to do any cleanup before you
+ # actually fatal(), though. If so, you may want to either use an
+ # |if self.run_command(...):| construct, or define a self._post_fatal()
+ # for a generic end-of-fatal-run method.
+ self.run_command(
+ ["tar", "xjvf", tarball_name],
+# error_list=TarErrorList,
+# halt_on_failure=True,
+# fatal_exit_code=3,
+ )
+ self.rmtree("x/ship2")
+ self.rmtree(tarball_name)
+ self.run_command(
+ ["tar", "cjvf", tarball_name, "x"],
+# error_list=TarErrorList,
+# halt_on_failure=True,
+# fatal_exit_code=3,
+ )
+ self.rmtree("x")
+ if self.run_command(
+ ["scp", tarball_name, "people.mozilla.org:public_html/foo2.tar.bz2"],
+# error_list=SSHErrorList,
+ ):
+ self.error("There's been a problem with the scp. We're going to proceed anyway.")
+ self.rmtree(tarball_name)
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ verbose_example = VerboseExample()
+ verbose_example.run_and_exit()
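+
+# To see the extra error checking described above (a sketch), uncomment the
+# TarErrorList/SSHErrorList import and the error_list/halt_on_failure/
+# fatal_exit_code kwargs; run_command() will then flag tar and scp errors and
+# can halt the script at the failing step.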
diff --git a/testing/mozharness/external_tools/__init__.py b/testing/mozharness/external_tools/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/external_tools/__init__.py
diff --git a/testing/mozharness/external_tools/clobberer.py b/testing/mozharness/external_tools/clobberer.py
new file mode 100755
index 000000000..a58b00402
--- /dev/null
+++ b/testing/mozharness/external_tools/clobberer.py
@@ -0,0 +1,280 @@
+#!/usr/bin/python
+# vim:sts=2 sw=2
+import sys
+import shutil
+import urllib2
+import urllib
+import os
+import traceback
+import time
+if os.name == 'nt':
+ from win32file import RemoveDirectory, DeleteFile, \
+ GetFileAttributesW, SetFileAttributesW, \
+ FILE_ATTRIBUTE_NORMAL, FILE_ATTRIBUTE_DIRECTORY
+ from win32api import FindFiles
+
+clobber_suffix = '.deleteme'
+
+
+def ts_to_str(ts):
+ if ts is None:
+ return None
+ return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ts))
+
+
+def write_file(ts, fn):
+ assert isinstance(ts, int)
+ f = open(fn, "w")
+ f.write(str(ts))
+ f.close()
+
+
+def read_file(fn):
+ if not os.path.exists(fn):
+ return None
+
+ data = open(fn).read().strip()
+ try:
+ return int(data)
+ except ValueError:
+ return None
+
+def rmdirRecursiveWindows(dir):
+ """Windows-specific version of rmdirRecursive that handles
+ path lengths longer than MAX_PATH.
+ """
+
+ dir = os.path.realpath(dir)
+ # Make sure directory is writable
+ SetFileAttributesW('\\\\?\\' + dir, FILE_ATTRIBUTE_NORMAL)
+
+ for ffrec in FindFiles('\\\\?\\' + dir + '\\*.*'):
+ file_attr = ffrec[0]
+ name = ffrec[8]
+ if name == '.' or name == '..':
+ continue
+ full_name = os.path.join(dir, name)
+
+ if file_attr & FILE_ATTRIBUTE_DIRECTORY:
+ rmdirRecursiveWindows(full_name)
+ else:
+ SetFileAttributesW('\\\\?\\' + full_name, FILE_ATTRIBUTE_NORMAL)
+ DeleteFile('\\\\?\\' + full_name)
+ RemoveDirectory('\\\\?\\' + dir)
+
+def rmdirRecursive(dir):
+ """This is a replacement for shutil.rmtree that works better under
+ windows. Thanks to Bear at the OSAF for the code.
+ (Borrowed from buildbot.slave.commands)"""
+ if os.name == 'nt':
+ rmdirRecursiveWindows(dir)
+ return
+
+ if not os.path.exists(dir):
+ # This handles broken links
+ if os.path.islink(dir):
+ os.remove(dir)
+ return
+
+ if os.path.islink(dir):
+ os.remove(dir)
+ return
+
+ # Verify the directory is read/write/execute for the current user
+ os.chmod(dir, 0700)
+
+ for name in os.listdir(dir):
+ full_name = os.path.join(dir, name)
+ # on Windows, if we don't have write permission we can't remove
+ # the file/directory either, so turn that on
+ if os.name == 'nt':
+ if not os.access(full_name, os.W_OK):
+ # I think this is now redundant, but I don't have an NT
+ # machine to test on, so I'm going to leave it in place
+ # -warner
+ os.chmod(full_name, 0600)
+
+ if os.path.isdir(full_name):
+ rmdirRecursive(full_name)
+ else:
+ # Don't try to chmod links
+ if not os.path.islink(full_name):
+ os.chmod(full_name, 0700)
+ os.remove(full_name)
+ os.rmdir(dir)
+
+
+def do_clobber(dir, dryrun=False, skip=None):
+ try:
+ for f in os.listdir(dir):
+ if skip is not None and f in skip:
+ print "Skipping", f
+ continue
+ clobber_path = f + clobber_suffix
+ if os.path.isfile(f):
+ print "Removing", f
+ if not dryrun:
+ if os.path.exists(clobber_path):
+ os.unlink(clobber_path)
+ # Prevent repeated moving.
+ if f.endswith(clobber_suffix):
+ os.unlink(f)
+ else:
+ shutil.move(f, clobber_path)
+ os.unlink(clobber_path)
+ elif os.path.isdir(f):
+ print "Removing %s/" % f
+ if not dryrun:
+ if os.path.exists(clobber_path):
+ rmdirRecursive(clobber_path)
+ # Prevent repeated moving.
+ if f.endswith(clobber_suffix):
+ rmdirRecursive(f)
+ else:
+ shutil.move(f, clobber_path)
+ rmdirRecursive(clobber_path)
+ except:
+ print "Couldn't clobber properly, bailing out."
+ sys.exit(1)
+
+
+def getClobberDates(clobberURL, branch, buildername, builddir, slave, master):
+ params = dict(branch=branch, buildername=buildername,
+ builddir=builddir, slave=slave, master=master)
+ url = "%s?%s" % (clobberURL, urllib.urlencode(params))
+ print "Checking clobber URL: %s" % url
+ # The timeout arg was added to urlopen() at Python 2.6
+ # Deprecate this test when esr17 reaches EOL
+ if sys.version_info[:2] < (2, 6):
+ data = urllib2.urlopen(url).read().strip()
+ else:
+ data = urllib2.urlopen(url, timeout=30).read().strip()
+
+ retval = {}
+ try:
+ for line in data.split("\n"):
+ line = line.strip()
+ if not line:
+ continue
+ builddir, builder_time, who = line.split(":")
+ builder_time = int(builder_time)
+ retval[builddir] = (builder_time, who)
+ return retval
+ except ValueError:
+ print "Error parsing response from server"
+ print data
+ raise
+
+if __name__ == "__main__":
+ from optparse import OptionParser
+ parser = OptionParser(
+ "%prog [options] clobberURL branch buildername builddir slave master")
+ parser.add_option("-n", "--dry-run", dest="dryrun", action="store_true",
+ default=False, help="don't actually delete anything")
+ parser.add_option("-t", "--periodic", dest="period", type="float",
+ default=None, help="hours between periodic clobbers")
+ parser.add_option('-s', '--skip', help='do not delete this file/directory',
+ action='append', dest='skip', default=['last-clobber'])
+ parser.add_option('-d', '--dir', help='clobber this directory',
+ dest='dir', default='.', type='string')
+ parser.add_option('-v', '--verbose', help='be more verbose',
+ dest='verbose', action='store_true', default=False)
+
+ options, args = parser.parse_args()
+ if len(args) != 6:
+ parser.error("Incorrect number of arguments")
+
+ if options.period:
+ periodicClobberTime = options.period * 3600
+ else:
+ periodicClobberTime = None
+
+ clobberURL, branch, builder, my_builddir, slave, master = args
+
+ try:
+ server_clobber_dates = getClobberDates(
+ clobberURL, branch, builder, my_builddir, slave, master)
+ except:
+ if options.verbose:
+ traceback.print_exc()
+ print "Error contacting server"
+ sys.exit(1)
+
+ if options.verbose:
+ print "Server gave us", server_clobber_dates
+
+ now = int(time.time())
+
+ # Add ourself to the server_clobber_dates if it's not set
+ # This happens when this slave has never been clobbered
+ if my_builddir not in server_clobber_dates:
+ server_clobber_dates[my_builddir] = None, ""
+
+ root_dir = os.path.abspath(options.dir)
+
+ for builddir, (server_clobber_date, who) in server_clobber_dates.items():
+ builder_dir = os.path.join(root_dir, builddir)
+ if not os.path.isdir(builder_dir):
+ print "%s doesn't exist, skipping" % builder_dir
+ continue
+ os.chdir(builder_dir)
+
+ our_clobber_date = read_file("last-clobber")
+
+ clobber = False
+ clobberType = None
+
+ print "%s:Our last clobber date: " % builddir, ts_to_str(our_clobber_date)
+ print "%s:Server clobber date: " % builddir, ts_to_str(server_clobber_date)
+
+ # If we don't have a last clobber date, then this is probably a fresh build.
+ # We should only do a forced server clobber if we know when our last clobber
+ # was, and if the server date is more recent than that.
+ if server_clobber_date is not None and our_clobber_date is not None:
+ # If the server is giving us a clobber date, compare the server's idea of
+ # the clobber date to our last clobber date
+ if server_clobber_date > our_clobber_date:
+ # If the server's clobber date is greater than our last clobber date,
+ # then we should clobber.
+ clobber = True
+ clobberType = "forced"
+ # We should also update our clobber date to match the server's
+ our_clobber_date = server_clobber_date
+ if who:
+ print "%s:Server is forcing a clobber, initiated by %s" % (builddir, who)
+ else:
+ print "%s:Server is forcing a clobber" % builddir
+
+ if not clobber:
+ # Disable periodic clobbers for builders that aren't my_builddir
+ if builddir != my_builddir:
+ continue
+
+ # Next, check if more than the periodicClobberTime period has passed since
+ # our last clobber
+ if our_clobber_date is None:
+ # We've never been clobbered
+ # Set our last clobber time to now, so that we'll clobber
+ # properly after periodicClobberTime
+ clobberType = "purged"
+ our_clobber_date = now
+ write_file(our_clobber_date, "last-clobber")
+ elif periodicClobberTime and now > our_clobber_date + periodicClobberTime:
+ # periodicClobberTime has passed since our last clobber
+ clobber = True
+ clobberType = "periodic"
+ # Update our clobber date to now
+ our_clobber_date = now
+ print "%s:More than %s seconds have passed since our last clobber" % (builddir, periodicClobberTime)
+
+ if clobber:
+ # Finally, perform a clobber if we're supposed to
+ print "%s:Clobbering..." % builddir
+ do_clobber(builder_dir, options.dryrun, options.skip)
+ write_file(our_clobber_date, "last-clobber")
+
+ # If this is the build dir for the current job, display the clobber type in TBPL.
+ # Note in the case of purged clobber, we output the clobber type even though no
+ # clobber was performed this time.
+ if clobberType and builddir == my_builddir:
+ print "TinderboxPrint: %s clobber" % clobberType
diff --git a/testing/mozharness/external_tools/count_and_reboot.py b/testing/mozharness/external_tools/count_and_reboot.py
new file mode 100755
index 000000000..9e8ae35a6
--- /dev/null
+++ b/testing/mozharness/external_tools/count_and_reboot.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Created by Chris AtLee on 2008-11-04
+"""count_and_reboot.py [-n maxcount] -f countfile
+
+Increments the value in countfile, and reboots the machine once the count
+reaches or exceeds maxcount."""
+
+import os, sys, time
+
+if sys.platform in ('darwin', 'linux2'):
+ def reboot():
+ # -S means to accept password from stdin, which we then redirect from
+ # /dev/null
+ # This results in sudo not waiting forever for a password. If sudoers
+ # isn't set up properly, this will fail immediately
+ os.system("sudo -S reboot < /dev/null")
+ # After starting the shutdown, we go to sleep since the system can
+ # take a few minutes to shut everything down and reboot
+ time.sleep(600)
+
+elif sys.platform == "win32":
+ # Windows
+ def reboot():
+ os.system("shutdown -f -r -t 0")
+ # After starting the shutdown, we go to sleep since the system can
+ # take a few minutes to shut everything down and reboot
+ time.sleep(600)
+
+def increment_count(fname):
+ try:
+ current_count = int(open(fname).read())
+ except (IOError, ValueError):
+ current_count = 0
+ current_count += 1
+ open(fname, "w").write("%i\n" % current_count)
+ return current_count
+
+if __name__ == '__main__':
+ from optparse import OptionParser
+
+ parser = OptionParser(__doc__)
+ parser.add_option("-n", "--max-count", dest="maxcount", default=10,
+ help="reboot after <maxcount> runs", type="int")
+ parser.add_option("-f", "--count-file", dest="countfile", default=None,
+ help="file to record count in")
+ parser.add_option("-z", "--zero-count", dest="zero", default=False,
+ action="store_true", help="zero out the counter before rebooting")
+
+ options, args = parser.parse_args()
+
+ if not options.countfile:
+ parser.error("countfile is required")
+
+ if increment_count(options.countfile) >= options.maxcount:
+ if options.zero:
+ open(options.countfile, "w").write("0\n")
+ print "************************************************************************************************"
+ print "*********** END OF RUN - NOW DOING SCHEDULED REBOOT; FOLLOWING ERROR MESSAGE EXPECTED **********"
+ print "************************************************************************************************"
+ sys.stdout.flush()
+ reboot()
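+
+# Example invocation (a sketch; the count file path is a placeholder):
+#   python count_and_reboot.py -n 200 -f /builds/reboot_count -z
+# Each run increments the counter in the file; once it reaches 200, the
+# counter is zeroed (-z) and the machine reboots.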
diff --git a/testing/mozharness/external_tools/detect_repo.py b/testing/mozharness/external_tools/detect_repo.py
new file mode 100644
index 000000000..67466a03e
--- /dev/null
+++ b/testing/mozharness/external_tools/detect_repo.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# Stolen from taskcluster-vcs
+# https://github.com/taskcluster/taskcluster-vcs/blob/master/src/vcs/detect_remote.js
+
+from urllib2 import Request, urlopen
+from urlparse import urlsplit, urlunsplit
+from os.path import exists, join
+
+def first(seq):
+ return next(iter(filter(lambda x: x, seq)), '')
+
+def all_first(*sequences):
+ return map(lambda x: first(x), sequences)
+
+# http://codereview.stackexchange.com/questions/13027/joining-url-path-components-intelligently
+# I wonder why this is not a builtin feature in Python
+def urljoin(*parts):
+ schemes, netlocs, paths, queries, fragments = zip(*(urlsplit(part) for part in parts))
+ scheme, netloc, query, fragment = all_first(schemes, netlocs, queries, fragments)
+ path = '/'.join(p.strip('/') for p in paths if p)
+ return urlunsplit((scheme, netloc, path, query, fragment))
+
+def _detect_remote(url, content):
+ try:
+ response = urlopen(url)
+ except Exception:
+ return False
+
+ if response.getcode() != 200:
+ return False
+
+ content_type = response.headers.get('content-type', '')
+ return content in content_type
+
+def detect_git(url):
+ location = urljoin(url, '/info/refs?service=git-upload-pack')
+ req = Request(location, headers={'User-Agent':'git/2.0.1'})
+ return _detect_remote(req, 'x-git')
+
+def detect_hg(url):
+ location = urljoin(url, '?cmd=lookup&key=0')
+ return _detect_remote(location, 'mercurial')
+
+def detect_local(url):
+ if exists(join(url, '.git')):
+ return 'git'
+
+ if exists(join(url, '.hg')):
+ return 'hg'
+
+ return ''
+
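+# Example usage (a sketch; the URLs and path are illustrative):
+#   detect_git('https://github.com/mozilla/gecko-dev')    # -> True
+#   detect_hg('https://hg.mozilla.org/mozilla-central')   # -> True
+#   detect_local('/path/to/some/checkout')                # -> 'git', 'hg', or ''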
diff --git a/testing/mozharness/external_tools/download_file.py b/testing/mozharness/external_tools/download_file.py
new file mode 100755
index 000000000..91b0a4668
--- /dev/null
+++ b/testing/mozharness/external_tools/download_file.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" Helper script for download_file()
+
+We lose some mozharness functionality by splitting this out, but we gain output_timeout.
+"""
+
+import os
+import socket
+import sys
+import urllib2
+import urlparse
+
+
+def download_file(url, file_name):
+ try:
+ f_length = None
+ f = urllib2.urlopen(url, timeout=30)
+ if f.info().get('content-length') is not None:
+ f_length = int(f.info()['content-length'])
+ got_length = 0
+ local_file = open(file_name, 'wb')
+ while True:
+ block = f.read(1024 ** 2)
+ if not block:
+ if f_length is not None and got_length != f_length:
+ raise urllib2.URLError("Download incomplete; content-length was %d, but only received %d" % (f_length, got_length))
+ break
+ local_file.write(block)
+ if f_length is not None:
+ got_length += len(block)
+ local_file.close()
+ print "%s downloaded to %s" % (url, file_name)
+ except urllib2.HTTPError, e:
+ print "Warning: Server returned status %s %s for %s" % (str(e.code), str(e), url)
+ raise
+ except urllib2.URLError, e:
+ print "URL Error: %s" % url
+ remote_host = urlparse.urlsplit(url)[1]
+ if remote_host:
+ os.system("nslookup %s" % remote_host)
+ raise
+ except socket.timeout, e:
+ print "Timed out accessing %s: %s" % (url, str(e))
+ raise
+ except socket.error, e:
+ print "Socket error when accessing %s: %s" % (url, str(e))
+ raise
+
+if __name__ == '__main__':
+ if len(sys.argv) != 3:
+ if len(sys.argv) != 2:
+ print "Usage: download_file.py URL [FILENAME]"
+ sys.exit(-1)
+ parts = urlparse.urlparse(sys.argv[1])
+ file_name = parts[2].split('/')[-1]
+ else:
+ file_name = sys.argv[2]
+ if os.path.exists(file_name):
+ print "%s exists; removing" % file_name
+ os.remove(file_name)
+ if os.path.exists(file_name):
+ print "%s still exists; exiting"
+ sys.exit(-1)
+ download_file(sys.argv[1], file_name)
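+
+# Example invocation (a sketch; the URL is a placeholder):
+#   python download_file.py http://example.com/foo.tar.bz2 foo.tar.bz2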
diff --git a/testing/mozharness/external_tools/extract_and_run_command.py b/testing/mozharness/external_tools/extract_and_run_command.py
new file mode 100644
index 000000000..ab48ee1df
--- /dev/null
+++ b/testing/mozharness/external_tools/extract_and_run_command.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+"""\
+Usage: extract_and_run_command.py [-j N] [command to run] -- [files and/or directories]
+ -j is the number of workers to start, defaulting to 1.
+ [command to run] must be a command that can accept one or many files
+ to process as arguments.
+
+WARNING: This script does NOT respond to SIGINT. You must use SIGQUIT or SIGKILL to
+ terminate it early.
+ """
+
+### The canonical location for this file is
+### https://hg.mozilla.org/build/tools/file/default/stage/extract_and_run_command.py
+###
+### Please update the copy in puppet to deploy new changes to
+### stage.mozilla.org, see
+# https://wiki.mozilla.org/ReleaseEngineering/How_To/Modify_scripts_on_stage
+
+import logging
+import os
+from os import path
+import sys
+from Queue import Queue
+import shutil
+import subprocess
+import tempfile
+from threading import Thread
+import time
+
+logging.basicConfig(
+ stream=sys.stdout, level=logging.INFO, format="%(message)s")
+log = logging.getLogger(__name__)
+
+try:
+ # the future - https://github.com/mozilla/build-mar via a venv
+ from mardor.marfile import BZ2MarFile
+except ImportError:
+ # the past - http://hg.mozilla.org/build/tools/file/default/buildfarm/utils/mar.py
+ sys.path.append(
+ path.join(path.dirname(path.realpath(__file__)), "../buildfarm/utils"))
+ from mar import BZ2MarFile
+
+SEVENZIP = "7za"
+
+
+def extractMar(filename, tempdir):
+ m = BZ2MarFile(filename)
+ m.extractall(path=tempdir)
+
+
+def extractExe(filename, tempdir):
+ try:
+ # We don't actually care about output, but we redirect to a tempfile
+ # to avoid deadlocking in wait() when stdout=PIPE
+ fd = tempfile.TemporaryFile()
+ proc = subprocess.Popen([SEVENZIP, 'x', '-o%s' % tempdir, filename],
+ stdout=fd, stderr=subprocess.STDOUT)
+ proc.wait()
+ except subprocess.CalledProcessError:
+ # Not all EXEs are 7-zip files, so we have to ignore extraction errors
+ pass
+
+# The keys here are matched against the filename extension (as returned by
+# path.splitext, including the leading dot).
+# The values are callables that accept two string arguments.
+EXTRACTORS = {
+ '.mar': extractMar,
+ '.exe': extractExe,
+}
+
+
+def find_files(d):
+ """yields all of the files in `d'"""
+ for root, dirs, files in os.walk(d):
+ for f in files:
+ yield path.abspath(path.join(root, f))
+
+
+def rchmod(d, mode=0755):
+ """chmods everything in `d' to `mode', including `d' itself"""
+ os.chmod(d, mode)
+ for root, dirs, files in os.walk(d):
+ for item in dirs:
+ os.chmod(path.join(root, item), mode)
+ for item in files:
+ os.chmod(path.join(root, item), mode)
+
+
+def maybe_extract(filename):
+ """If an extractor is found for `filename', extracts it to a temporary
+ directory and chmods it. The consumer is responsible for removing
+ the extracted files, if desired."""
+ ext = path.splitext(filename)[1]
+ if ext not in EXTRACTORS.keys():
+ return None
+ # Append the full filepath to the tempdir
+ tempdir_root = tempfile.mkdtemp()
+ tempdir = path.join(tempdir_root, filename.lstrip('/'))
+ os.makedirs(tempdir)
+ EXTRACTORS[ext](filename, tempdir)
+ rchmod(tempdir_root)
+ return tempdir_root
+
+
+def process(item, command):
+ def format_time(t):
+ return time.strftime("%H:%M:%S", time.localtime(t))
+ # Buffer output to avoid interleaving of multiple workers' output.
+ logs = []
+ args = [item]
+ proc = None
+ start = time.time()
+ logs.append("START %s: %s" % (format_time(start), item))
+ # If the file was extracted, we need to process all of its files, too.
+ tempdir = maybe_extract(item)
+ if tempdir:
+ for f in find_files(tempdir):
+ args.append(f)
+
+ try:
+ fd = tempfile.TemporaryFile()
+ proc = subprocess.Popen(command + args, stdout=fd)
+ proc.wait()
+ if proc.returncode != 0:
+ raise Exception("returned %s" % proc.returncode)
+ finally:
+ if tempdir:
+ shutil.rmtree(tempdir)
+ fd.seek(0)
+ # rstrip() here to avoid an unnecessary newline, if it exists.
+ logs.append(fd.read().rstrip())
+ end = time.time()
+ elapsed = end - start
+ logs.append("END %s (%d seconds elapsed): %s\n" % (
+ format_time(end), elapsed, item))
+ # Now that we've got all of our output, print it. It's important that
+ # the logging module is used for this, because "print" is not
+ # thread-safe.
+ log.info("\n".join(logs))
+
+
+def worker(command, errors):
+ item = q.get()
+ while item is not None:
+ try:
+ process(item, command)
+ except:
+ errors.put(item)
+ item = q.get()
+
+if __name__ == '__main__':
+ # getopt is used in favour of optparse to enable "--" as a separator
+ # between the command and list of files. optparse doesn't allow that.
+ from getopt import getopt
+ options, args = getopt(sys.argv[1:], 'j:h', ['help'])
+
+ concurrency = 1
+ for o, a in options:
+ if o == '-j':
+ concurrency = int(a)
+ elif o in ('-h', '--help'):
+ log.info(__doc__)
+ sys.exit(0)
+
+ if len(args) < 3 or '--' not in args:
+ log.error(__doc__)
+ sys.exit(1)
+
+ command = []
+ while args[0] != "--":
+ command.append(args.pop(0))
+ args.pop(0)
+
+ q = Queue()
+ errors = Queue()
+ threads = []
+ for i in range(concurrency):
+ t = Thread(target=worker, args=(command, errors))
+ t.start()
+ threads.append(t)
+
+ # find_files is a generator, so work will begin prior to it finding
+ # all of the files
+ for arg in args:
+ if path.isfile(arg):
+ q.put(arg)
+ else:
+ for f in find_files(arg):
+ q.put(f)
+ # Because the workers are started before we start populating the q
+ # they can't use .empty() to determine whether or not they're done.
+ # We also can't use q.join() or q.task_done(), because we need to
+ # support Python 2.4. We know that find_files won't yield None,
+ # so we can detect doneness by having workers die when they get None
+ # as an item.
+ for i in range(concurrency):
+ q.put(None)
+
+ for t in threads:
+ t.join()
+
+ if not errors.empty():
+ log.error("Command failed for the following files:")
+ while not errors.empty():
+ log.error(" %s" % errors.get())
+ sys.exit(1)
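+
+# Example invocation (a sketch; the path is a placeholder): four workers
+# checksum every file, with .mar and .exe contents extracted and included:
+#   python extract_and_run_command.py -j 4 shasum -a 512 -- /pub/firefox/nightly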
diff --git a/testing/mozharness/external_tools/git-ssh-wrapper.sh b/testing/mozharness/external_tools/git-ssh-wrapper.sh
new file mode 100755
index 000000000..86ea37088
--- /dev/null
+++ b/testing/mozharness/external_tools/git-ssh-wrapper.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+# From http://www.reddit.com/r/git/comments/hdn1a/howto_using_the_git_ssh_variable_for_private_keys/
+
+# In the example, this was
+# if [ -e "$GIT_SSH_KEY" ]; then
+# However, that broke on tilde expansion.
+# Let's just assume if GIT_SSH_KEY is set, we want to use it.
+if [ "x$GIT_SSH_KEY" != "x" ]; then
+ exec ssh -o IdentityFile="$GIT_SSH_KEY" -o ServerAliveInterval=600 "$@"
+else
+ exec ssh -o ServerAliveInterval=600 "$@"
+fi
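+
+# Example usage (a sketch; the key path is a placeholder; $HOME avoids the
+# tilde-expansion problem noted above):
+#   GIT_SSH=/path/to/git-ssh-wrapper.sh GIT_SSH_KEY=$HOME/.ssh/id_rsa \
+#     git clone git@github.com:example/repo.git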
diff --git a/testing/mozharness/external_tools/gittool.py b/testing/mozharness/external_tools/gittool.py
new file mode 100755
index 000000000..520aeaf38
--- /dev/null
+++ b/testing/mozharness/external_tools/gittool.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+### Compressed module sources ###
+module_sources = [('util', 'eJxlkMEKgzAQRO/5isWTQhFaSg8Ff6LnQknM2ixoItmov1+T2FLb3DY7mZkXGkbnAxjJpiclKI+K\nrOSWSAihsQM28sjBk32WXF0FrKe4YZi8hWAwrZMDuC5fJC1wkaQ+K7eIOqpXm1rTEzmU1ZahLuc/\ncwYlGS9nQNs6jfoACwUDQVIf/RdDAXmULYK0Gpo1aXAz6l3sG6VWJ/nIdjHdx45jWTR3W3xVSKTT\n8NuEE9a+DMzomZz9QOencdyDJ7LvH6zEC9SEeBQ=\n'), ('util.file', 'eJzNVk2P2zYQvftXTF0sLC9ctTbaSwAfim2BFCjSIsktCLy0SFnMSqRAUuv1v+8MP0RZ3uTQU3Sw\nJXLmcWbem5GWy+Vb0fbCQD2oykmtLDgNDVO8FVBL/NG4y/zOcrlcyK7XxkGrTyepTulR23Rnm8HJ\nNj01zDatPKZHJ7qeMBe10R08aFXL07/MWDw+Wrxn5+nyAs+BfTqtPAn3N94KUxwOinXicFgvFgsu\naqh01zMjCkLfbnzgu/WbBeCFUcddTK0RaKqcUM6CrsGdtbe1G+iZtYKDVCAkmhlg1rvjhRVQoRah\nLuiK21UlrJXHVKaeucaW8IfGbQYW88E8I4Bi8lmAdQaTiKFKq9UGrAauQWkHg8VKK2iZOREZFBOV\nm7xlDdJKZR1T1ZjhkVkRAGOadPk9rBcFnAxXZrWBj2YQ66+A7b4BtpuC7W7A/BGHsaD7sFAawXiR\nLXZzi93Uwgg3GHUDtZ+5Rp65NKJy2lxQJY5hHsW4gtUc6lq+ZNrhfcB2GDAlTuyfkAmVYbwaCMdv\n9kY/S44qOMuWV8xwjxRgN8SpRH6oPx5bC7XWP98fmXmERFQjHWbI1KX4VJdCcXtGJRUxKrRHXklf\n2pattA5jyMGvP4/0kBoQKROB6i+FMdoUywc9tNxb1FJxuL+zBHhnl3AHRYozg15VGDHHZukvVN3C\nmgrNrdv4pU5zsffkjhV8wGVAK8rZ2/XYRcI8k45xLHQSO4BGBrYFONmh9GU9YqHQvFZSecJoKG9O\nHzNPjjn1iQttzFxmFqhpN7EIudqGbe3QFXVOKqkCCf/w9veftn5K+Wkwmw6+rx/rxw0VuREvRGHH\n3Eg3kh0HXEnHJMn3Y9NQwxxXYfncEBrVI6d3bHX1RE3Rh474bbuDe9j+svs1JxgV4U2zp/dGn6dx\npSmHnjMnCm95zXyJwXN5wh4vxrqwWhwG1Ur15JubxmkuUdiAtAHypLRRxLoXok3d5CvEceSplQPx\ngqpOxXHm8maaA4qeJmQpLel+duI4crBFjNbOa9iGMW5jy5xZmyPdoCB7rs9qqtc5km82D3G7n4mK\ncX3RUhXh7Hr9qvlVxfpbG0QyHSVHKHlbtFZcnz+phi+Z/Vo5IuqcJW8jXirRO/jnw59EyAYmZ/wI\nfxFdApbvNA6vqonvcZMnw3JKjaDpojTN3N11AEE/30jFMGnFVFGz5kbFZVGRQXvxXT7OFDTAVx8J\ni/mvA20YDmWJPWg6wSXqOcyWBoe2ofTpo4PwonOSW81REl3vxbofvzPK7snSPc3Zfao53pNZ4YNb\nvzaZ9PFL4RvYJ+FbeENE1Dy0NZ61OuPijXOeQDGWYEHK8NQVcTlWJhau1YzTew6/euZKCKuY0ey7\nqJjMTFoN4+NT8v68hh/2kB8zaXEivNNKTCdEQInx4FdWCif84atP+G9DrEIf/tGODW0iN8eB8/AQ\njYv4v/YMTvYDRjHDXN8EGV0wnBvbaewxlJvgD6ii7yUBCuV/5XDUuv1ekqBYBLt1eS2R/wBE3uXX\n'), ('util.commands', 
'eJzdWW1v2zgS/u5fwXPQs9x1laDFvSBA9pDdJnfBtkkucS9XtIEgS+OYG4n0kVRc76+/GZKSKPkl\n2T3slzPQOhLJ4bw888yQHg6H55XIDJdCs7lUTFVCcPHAMlmWqcj1cDgc8HIplWG6mi2VzEDr+o1s\n/jK8hPrvZZEaFFXWz4V8eECRA/xmJ/VT/ADmA/4JKkoSkZaQJOPBwKj18YDhxy9dcfHu7ZwXsPEy\nXXL77vrz3cXlu7coeKoqGMC3DJaGXdiZZ0pJddybdp4WGgaDQQ5z0iXJyjzCfxP2+vXjKlUPeuxW\nHLBslTOumV5CxtOCccHMIsXtgaXFKl1rtkqFYRwNVlwYQBHwBILxOb4baSak8YLg27LgGTfFmmUL\nqUHY92431Mj9EWdyuY7GztA5G+HuI5JB+7oZTq926Rc75x4lSE3uxCe/Hu2KuZjLaOjDeMxup6c3\n0+HO4Vd6yF4FEY4Lrs1b9EvBBZB/xm4pQeQR1hP2lBYVtLrF3IDCf6WOxq2eWzeym02cFG1UZCWh\neBeSEtQDJCCeIvznRQlY0RtnKP7BlRShu/x4XC3z1IBdaN8rMJUS9bDfAAG+M+YI9ptKMBxiUcrI\nBUzOGU6oShBGj2PGblKuIXTUj2lRQH7tniziMHxWmllAnUYIAW4QMNwsMKbizS+gJAq7mHcmOX0R\ncVVGwuZVUawnoSVHMaWj9+wWKzze7oA5V6B0BHA6x9jUecdmkKUVmoAwzqUYGYdiNIJMJW24WNhQ\n5jV60fNPqdKsrHKCwwMKtxNlZZaVaQCL80b7wErjBNY2wp0Rp3xDAPYBZxOxxPSfj/UOWDldjoft\nJO+yIFLZArLHJENTt7nNM8feyG5B9qhR4ezm5upmNCFBCQ2dECEF+hBwXA5xgZIDO6FIlxryrrXs\nDTP7LD67fM+iV/Hbua7Xj78KzKv6IYD7fyoOZifoc3gSiDTKriWICFPMv5mw0WrUyaQ9ztQmxxic\nNEvxGZRqn1tnt3opKKWBVjEN6gnUhCE8FZWEk0spAF/rxU+wbh9ORvjfaNI4J/j0TEOpyVLBnH9D\n677gqvsarfUWbRDauTF8MyDy6MvoTTFqtblvuNkp9MxSjkvRl8vULPDtEmNGgiK3duyFBSvT5ZJW\nOh80W3HNhTapyMC5aJZqQNLELBx39if78Os+jFbAdLUXvmM95Hc4MVli4sucZ8lS1nHFedQPJFTh\nFFL1ybujowmj8fbVUfz2T1vD4T+1DELLLM0efSh/JfkSt6QBBBlRpoUhI27FxFgWQI2MlVabQpn2\nYtrepGwr67fQdkvZg20uYHPfdaFwzL0ZSrMKub1I+hxdLFdEt40LvIYOOW5z7DPgG2SVFWXSR9DI\nFQK7KpooNqLXYgZBpUxCVNNQBoYV3VHH4v+6zDxbQcgTKCQAzLVlxy2OaD25pVwVqUbmtSA9CWYO\nHCgW2NnavrU1Q9G2tGdsc3A8aEbQeBzktrFklEHHnZQjk3KYVQ/R0KPaQxBBZRsulY07C5y8kxN2\ndLyRu7sqUmBBf8lvKVF9GXXOdAYA+/VNDdXzCR2pbEJ0EvhQyNWOngK9QYNvwoh9vyd/6HOACmsw\n4RIjWfokeY6nhrQs7UHKZ3w3WCEscN+ewbXznUY7nI4a91ll000BKshBpNBOKqLGPHqlx3gS2EPm\nUX/9JFBwvBnTTkcXfvpyop2UtCnUN2tn9otU37oDGQ8WCdZ4a6zFTY61w8vAxRPGH4SkmhrH8XBf\nIfNbb2vv7NBWpJIW3lbUoykuNWljQiNvU2Aa4k7FcK8Swz4sMcvy8TNrJvWeWyDwzNJbCgw5zRBE\nmuDgA+U2HRyjvkbPefH5T4CG/1lWTTgBE1gO0AXAMuo0M3VLhOfpxJUEx/lcZEWVQ+L7WnuLMKHS\nZhIMcP38a1uatn0ISp3rMLobuvKHPQaYurduOgc/M3c3FLUU7D7xQa2IJrlpJmvcGFmqPaASbSps\nI7xQbC4hLWPnqDsXVXfvsZYV0wtZFTmVc6rttuw3jQxSX5Yu0RbANq1AI/G7lJUgm600pxeLvsfx\nOaxwuaw0eWC2NqDHk0bNHNK8kNljc9rlfXeEfYxVu1Oqb6fvrz5N3amuk5LNZCqfg+c6nN/nUOu9\ncMKGbdbtOuju7UL8iSscvLg+a05e7uv53OnaXO+KjMVNoEmjtR10W8eIlLxbQu2oA3Qmc2B/2Ogu\nXlK3e1J8EQ+2oQ6oTr3NLujZq4HORDe8cW8QdJ0vuRlAUmwVOWAfsRPHBQpc6njvufxl0qVpU7za\ne4C4cXOwfeu13+X6YP/tAZ7QnyChQ2xE/7W8NqXcp64f5yyLNANiNHs9qBdYZIpYlcgk3v6VVI8a\n2cfQCaESCEx/rhK5XOmYTbHk4QRkkB8gVVhnrIOubk/PrUR32MrBHaWiHyR6fIUGz5Us2aziRT6T\nBsk8fYK4vrceB0eYugO6IWuIz2w/bO0Z1JmecJ14fbbfYH7StDJxZtVTGXUMLXZ6o85lPWQ1OxKI\n2wsCrA06dLHDkfUyOicv8GA3U/IRz3TYxD3qMBtqIVzTUF8IfXCGi+R+jfYLeomQA/YvPNTN1zZk\nOVeQGanWhBPiisMVHfgOXR8CbWgrpQg8dD8y8Dtli1LmdqMJO/rL0ZEPFC2huxiiZOkuqXGXvqZ0\nAre/KbgbY2vTz5ILL49GxoGTMR/vXMAmtqmuT6wLxBOzKtNtQsm1tud1qpk07JwRyLGndjzRHbaG\nA6cajJwsmS/yxAaiFz2n6gkbCTPqBq6FSWrvFqLGNHu5dJdc/TTe7DgP2AXVZvHoKrQ9Mq5Q3xxT\nD0/hE8wZg1MCK7EdvpxukVOmGcoBykws0aS6teViVLIHaTsDyQogCdz+UGGZYIucN9Qf+uj2gOki\nHdh19Ocm3Bu4pGA3U3uWh1zVzglYst+cH7D31gNYnm3zQor0sqsbgzA5dmmx0yoL4t4sn089bWmg\nbGCNTHwQspPtGfs0RDc/AudZRizlLwtyt9aOxLdQm15rAyWVc/9bXezetL8/+RkY02joswM5c/iR\nZ0pqOTfDwG5fMu0PcJ3lsW3iNd1p4dHn89/vLi6fWbczG8K53qxtZNvUpzql39if7+Y8Y2FBqimV\n1iCAxYNZ6PD8xT6e/ju5Pp3+I24UuJb2DGQ9nBVyNgMFKl6u486FWaqRxEzX5e5CiXZq6QjpsGir\nquM2QoGfNvqKn799/Tpi39mVe2pGs2zDseEi//vncZhWXVRv4dHA7/Vd8iiHgh2es8N/siFW0RGe\n/brVYDPN+hIsttnh7XYZYe/UKSBExOnM/xLc/C4c34I5x+9TYxRHWgN9F/WdNwmmn198OEtOp9Ob\nix8+Tc+Sy6ubj6cf6p1v8ZABjuDxFOLwgp2UvZJNLbUT+5VAHZbeFhLnxf7+m4hv9XkPBRggCzaX\ntSVvPkdHUC7WP33H5wguWqU3luEXvnodvx6FFRGnJin6
CLFlhX05um8vxVyldO//et+BSJ2L8YjV\npdc+xr1ClWE3zkXVcv+LanC4VaviH3fH6/3FzdmP06ubz93d+1TwIvp/MYYCFn8RkDY32BHlnprt\nfNuowvsa/lug8V+mJBic\n'), ('util.retry', 'eJytVk2P2zYQvetXDFwsLDuC4C2wORhxsUHQFgWKnHqXaYmyiUqkQ1LxGkX/e2dIivpy0h6qw1oa\nDh9nHt/MjmivSluwouVJrVULdSdLq1RjQPilm2ZX49dKJS1/s4049YvB0jLJzlwnwdqo81nIc4K/\ncOi/8jO3v+Mr12lRSNbyotgkSVLxGjS3+p6y0golM2DW8vZqzeElA9NwfqXgDu93GbTsrRgsL7AF\ntCYQH4dT8LeSPJQ0h/Tn/j3bZFA2nMnuevisJMdj9Bkd0Pznzb3+9fdm77BWq9Un1jRw9AGtgdHB\nou1aUDVaQ3hrR5qBTlrRgLBgurLkvDJDRJgb6xqLyYNV8JLDMUa/BmHAXjjIrj1xTciGI5uVIdcb\nEzainLi9cS4jL9kM9/0OmKygUt2pIRNn5cVT0W/J0C3CTbOZULrOAY5zEl2kDGx3bThuiTiRWsqD\nYfoX1TUVRgsl684Xm8NvNQwwoDBbTa4S/yjDI1AjjOUVCPnobKY5aCYMOjgJ9peSEXl3uAm8qNOA\nFVxF2/JKMMubuwvjGK7e5XLV6quo0ItYK/Gm2QkzwwsksBHrbm0KBqy2mASmELMnxD7hz4pU1bVc\nWhOBQohwZYZCwwsTnpu76nSvSV92BKf5l05o1NUSCUPEwzTKBCOSlIEjHnFckbp1ScH1WxtuTETO\nI86R9L526R+9+D3P/SU7NYnSkkBiFBQ4pQBY8YOY0HjsKVxj4bgFSpR6Q7CHwt6M16SyMXWlB9dg\n876inlY8fBj6wX6QjzrnFT9153Q19X6qwBHgJDc2r+AJ0lHbgOkxo66z8YFI7GLP7u12EUiQhA+H\nWI5DJKjd/QSWQhOyVunKCXsP1FeoRJ8MysJeXA/a41ffhPz7agISn1U4EX4IKfQN01id0u6Nf/VQ\n+CFD+LE4uO00qsNtS7fklcF2G/yjqy+/RTNdphZYj7lREQwVv4dVRl8FMXD4Q3d8Gg3ebrjt/SLf\nsJAuduBNPGL+m4T/Kr4S36QyidwSbWM1Ttih1jE/b5DNT7D7D+f9wlAfVVCQu+kq9vUTrxV1M/LE\nJYzl8T3TMyhw4UPW3K2n3/EaAj+M3rfw48JzluWkFJYZz7En7hNvGg2E7AZjLSTKf1YiEt5RbQ1z\ngHB9YOvV10vUfwWheoD1eg0f8T9hqTSz2EKQ2zBHbHLszqylTtYZHEu8/+sA7tmiA2ulRhrL8zyZ\n+8Zh5Hm3G48jz7sB5cR0utlPYEKESfQpImRRowIVxkmNebTt1Q1a3jqeIMZbyeWKA9S8dveP6tyz\nQXhh2PGbwrjjfxBjxPS39Ti7gmR21DLE5PFqyB3v+3U2OsY5EEsjBP3vIlhwFlEKYb/D0v/M0CN2\n7oLjNNTHkvwDPQB6iA==\n'), ('util.git', 'eJzNW+uT27YR/66/ApF7IymWeEk/Xuam4/iReJrGntiZdMZ2JEoEJcQUIRPgyddM/vfuAyDAh+S7\nNkmrGVsiCSx2F7u/fRA3Ho+f1eXGKl0aketKqNLKKoUb5VYcld2J3XY8Ho/U/qArK7Txv0y9PlR6\nI01zp66KQ1oZGV0Xau2vKjka5ZXei9qqItno/T4tMyP807pcbvbZHIbt9Y1cHlK7m9PdD7WSFp9F\ns3NVSD/TpLlc1mWhyvcjv1aht1vgfwTf4tpfJVtpv4Ofspoul2W6l8vlbDQabYrUGPFE5mld2Fe7\ntJJfp0ZejQR8DvBo1H0EFLu3pkgok7lY7tP3cpmujS5qK6eVPOgZk1K5wKvE2LSyBhU7HaMYV5eX\nYzcEPw/EP4CCcE9QhUZ4cs0gVA5wgfTeFLKMCb1rBuFTGOSfXZixuIDtS3ByAiTxe4r/zWiKLIDD\nMRIRpbZgBUTgqkuuS4AkHPEAW1c8yykD9L3ES1J2rIu1sgZoeXtJUMpDoWxEbaeN5SFgQsmHWoM2\ncVpSSlvozVyMx7NRpIv+QGKzMLZSh+kYVBOmOE69KL9oVU5xvblgdTD3u9QA9zfKgGdMM4mP/aUT\nA9ziByJlxOuqlrzFPELIj8qAkKBGnIoOhDNsdRtpNDbu6ZvJVtnJXEzAWvFrsdAl7Ekp6aL8chKW\nfzcXm2N2jYRn0f6QUMgI7+fHjTzEXpo8TotCZi/56mlV6eqqO/tZWoD7xvLnjeg57uI5yWlAR/DE\nKZyfbdJSrKVIxbpKy81OANrYdCvwWXIfFZmdPi6AKKkmmzTc/TmKUSVYKmtlDf5/Tc+CYp7DY5UW\n6l8SPBcMYX+wt+QVRlld3YrUsmbE85x+eI0BGgplyonlKXOhLOBvUaDGGBQz1ibMW+HCKxhOYs2F\n3ckS1Qp32VH9xE0lUwsTvXZho9C7vekrk6mKZIkgCAwwUWWup2NaFuMAgMdctNUawe40PJGFh078\nYDhBfeF6BQg5sBgNi3CFnJGVm89ao06x1RkGEralyzur8a42QWbamd+WYEhamEDPH4hv/BbloOb3\nQtcWl4ebADqw+1Y7/XNM3ctM4QUwJTdgCjgENORoscxoBLSZ8N8tW0YifmLP2SHhHez5EQccagA8\n0AFodw+hSB0K3nrj6MF9AFe07AIZMRiqMjYOFBu424ElbnRpUxiK4VjTDFnamENH7TtpJ8ZLA0SR\nv7YgqjK278CwFRgRYaSJrYRd8MUrcra5iBQO+pOJrKoSgs21+OsX7a14IL4H602blUFFSCFJEgBL\noXNii4UweEn+xU6Vdgg1JFr3q1ShnztO0J8CAwBBYKgNCCEMMFDjMPr1YcJe8m7AF07NDnNGbSsX\nY3YGmDhzcauFhnjfI5JZAlmKtbF/DaC0Uwio8AYgKhMwjWziPvjQhsTeliOqgqQRvr7UB0hS3oxh\nMfBXcN+bBcV9vFgs4O4CVhlH4D0XgBXgTdcxkecvn85iM8EHyTEFLJ6Jz65Fx1JaTDbWWNtDjWkF\nzeU1ErDpbDpLOFEIK6BCga0Imkpd7QkxBrCKKc9aUQc0DLOnDaFr1j5gYnRrgNY4QUXNehGMSf4+\nMQxTM8fFCYthT4LcCsADf6OlBLdDZOco9gx+NXHHMEAphg02Nmtkkc9pRiW3dZFW7aE07JJkdkYI\nSbesbN+qRwN+BACWK5cwrbUu+BeIxw8rmZB3skeeMk0qPO5mfJHVscOYJUn/SZtSeRiLWTluxjjs\nUTYcA50tDOAJTsAxscY8Ac4oplkr3c3c1hvYeooGlG3POTK4/U8LiFMlYLzpshMbDGXpoF69/gXM\nwTCc5Rq/A4EJL07Ul27kOaLMRkTVRVkqQWmXAm0YdZzMQGqRR8lGcqwUJP/jC/O2xFqntbSHyk0h\n0zKuRR6
I10cNNpNDfNvDMyPGNAatZK+zupCYZBx3CvJVir0QNY9SHFOIk0aLPK2SBpxbSSpRIXPM\no/+zicM5p/wTpsbMplm2xFTF+r3iC6qnmotIFnCgR1mG6M7PKLPOxCqatvL+DEUU4JPHf0wXVvhj\nxVYOu0MNABi8itZZeRftScuDyAQyzsiHOY2kn0UG6UZAFXdnSV9JyygFkwhdvNR34BGWXMC0+/G5\nbfjs8ziMn54zxs8bWbopcwwC32PKojhlcduVaYm5ioN4FerGDugFQRY3d4W28/Y2BG3IORaglEp2\nwA3vm2mUFOypHwHJnt3sphX6oHk4ffvq4Uy8neYSbr6d/QWdEsZIs0kPqMOgvTkt1Arv+8F4vk+2\nla4P0y/7xnM/wznvIIM2j6lZJtf1FiHmCs2BXISHIkiE7sX+1jEFWjlrNj40RBOuY667QXzUnwCg\nhCkbmtNQDYesmharUDahjPD/9AgQemFmjvfTypuH9aIK8F5+OxDC2kwCbrR5vDCf5Cswc3eo9N7s\n2k1z0WpwXKMeQ6vFXdaHDOLOEkdeU8UdlOBbgNfdniDoTGEeZhwNigdMotMxwI6fAdeF1ICKshUO\noup+B/uz8rysEDVWjs+V2OzkBiorqjqxM0rUGMMTNpMnmsMV1o20BOw6VmO8yi49AEDMwbs3RU2q\nh6TMqHVxC6zq9VpW2EGlVIMaOU3vwYlFDIINzLkEttjagOq1NpIgzY0Sawk4IhvGnMiNHTf6Q2rD\nTdiWmjmFkOWNqnSJHd3p+Jvnr5evvn30w9Pl149ePV0+ef4D2A3qfDa8St9bmiZl466tpmWbi05V\nQImMCZvezB2y+JgAstBmkB5EDJI+qRkbZcLNyMGODVXouJehFURuFGY1k1pFG7GBfa1moGtuobW3\nGyQgeG0V6CYaytr2I1x18pS+wHDbyyCzx7QqgUvgV9dFhuW5ay3EbYoL8xVUHCZdU58Dn8B3LMsc\nV1qi4ANsxhZDqu497O0D1Sv9FjfXHp3q/DF6H/JFkzr9MVdFnyjL3Yhust7vi7U0BYDo0gOBjgtV\nFHgzNVNDJd/UZ19FLtzr3LHFhwZYJN85a+x2YkKf06UwsGVosAAJgJd0j+j0bazPTqhJXAXWN9d+\nX+6BeAGLVEcFewziUqICOmmKIv+hZ4NY774DUrvvNuAzWvueH72eIazWdcWMopbijJnUobY7Kw5F\nupFnfTx24s37Jb3Y+lSVRIqB2lCVmfyY4Lzx7IxlNYQHzGuooRrGt/coaoEODDmzhU5zEDuOEnJX\n0N4BQg24OVsw6dqpLm0i75wDHMpzlI7CLr1xwat5z5IWmI7eUjfd6HnTPIWaH5UsSknrOAKUiYKV\n3todvhBkr9dLvn0ddYviVzmwW+2deoAFYKbRFYmjwLQwB7lRuZKQdENxiD1azJ7ljax4yVC+h1XD\nmwl8Bdd97dJ648Srx5ylG1unBcRsZCIXbM6wNHDoRMc6iAWPSPhMgAz56PbAO3L+aS7RfD/9gmxI\nWdT1CZtsmi1ym6PsydX9zvj7V4OY1QWJZ0QCnRUkM4wRjeu2xvYiIhN4/eLJiyvxLWAb+CYtzHkq\nYYeByuU9Kc1c2nRrLv8Jnx6R6P1Yz5riD1GP+zIc5jrwNOvNHX5pcXeKPUjsvBO5V7sxaO6V3ksy\ne7CB0oojpGzbzwbGPeZgFSEkBpJKLrgd350QgIu6/2FPaG8hUC7a4W8gmvhPHAfPDQuvBfxn0Fju\nt8/Rfrg3XnjblTHXYw0xRJXj++/23ej+IXseZaLNDpzMQO+5Cffd9n6a0V3sxIj2Zve1Pbj1saOx\n1v8jHzuRNP+P5AcXhmyOsRONh1u6oaHBgk7Yoia+A+JxOkqihmqVH33c51bkRh9uvYquKPn3UeLK\ntwJyX827KBMFGYIahXgcOSAe34HYAhE4NVGUjsNGs0Y7Tf10hCOIagdrp4fLCzOhTlcvFg7owLCD\nIIM+fgO/xkJSgy8wPZHxkNRhS3NXvPYkDENcyhDXO+4Bnp6hnZqeyI6bZkifBZVHfY22oNxpHzyL\nAXQaIxmaHk/1bftTOTw3V9qtFq4iOXHvN29C4+UxUjWhCY5bSim7wZ5J04khu4bbFMgg+8R0jmDB\nv+iifDMR4jWkT0ddUV1I5uyPYdCJjju3ULiYodNu/U4K94NhBC5CY1o9H6TO4nePh6CUUXltGuZq\n8JEwOdIWUXBKJBKQTw+K506ZNM0dt7XnK9wTJSj2NlngIcx4ZC3q0lULkaLcnChaYvua79IZiS7N\nNt3HsUIJbXhC29kGgb9508s2yvM6Vto2wuj3kDN3X/b6j4sQf5e3a51W2XM8U1LVBzvAUi9tult0\nkf7xdAxhfl3IfdvSnDpP6gc/eKJElXVYvh8/g9pfukMs8RaKPIXCMvsKvvnhOoUy0OrQD3aW0n0T\njOp3RyrexW2YwTDk0/ofwYv5BMflYuHkQ2/+WwCjfZZQqzSbThaLUi+oLtW1nQSL9WGrNUl+tDjp\nDb6ZpvNu0UG1TmsyuzqxHD+dBIkbqgEL34XTIc25EEd8UHRnYdzojIKbx9rBYDDYFo967CFdbdCV\n4jtAaQsyXG+b37G4Tja3tV2TOyEYKqVCUPUAiz0lX9kPQxAznTVvN3HlqE2gaSorsa7okJNbHtb7\njvOPXVpuZYDFTJkNuFl0eM61MLpFP8Sbo8Iak9ZOrRv7EyFrM+rnL8SUqxpaFi7XstDHGVW+utpw\n8c0lJfVFHJkMjDGHf+WGMhlEPb3fA5arzPj30nvq7iPAc88EKO35NFrpzj0hHZvC00wYC7pJIFbx\n6Qv5oVaANKgRoD1piOD0xYJnTeYeQJQ/EEY9nAo1vr4VugAuBURFQ6fINb1dGeqj9LteXSf2vuWP\nRvF784bGQzH5+YtJdMg5GH337GcbdxwW9ByVHcLnT5MLc7lPIfuqOINrzPsMmrVnc+437bx96uT7\ndxWaCXuZ7yL0p3y7X6V0Hbzv0Z36cSjh4gHY/+hkWNR8Adv0zkVAfyLfwiMIhA53TpS4O9RLlOgs\nYpwuuQwpfu/UywfukC6cCv+ocVbsYPA/W+/9udG8KRn/D8P5A/FYlzeycraBzeCy+dMHPopGh2sn\nWMpxyRhOVTvjpz9RGPobjKGEgZTR+Bwd+ojThmDTcdbwhDqZbHj4LPQTmSXqAXKnEUq7jWziBebO\n6a1vRTMxKE/1RnHjVUOsoLNOrkFKb8GpGkhxxUNdbSV6CUY2d+TIydTOTpCBySyAbwfvVN7y5k7J\nFoiNH1JL0x1uuPw1nvTb5a+O7m9X7VERfESDxgk41z7F9+29yjLATQsyW4gTX0THIvuW2Od/B3W0\n+aPZnZ0IOL+Doj8/x/HnEad/ih7/O25mztFPhK/4kJWLXPTnOL2TVZzzNClBOJS6wvErn+AVt3R8\nIjom0SRyJ48ohwNW7ogyXnz79NETf2qP/yztPqeoXHw4czr03yOf
FDU=\n')]
+
+### Load the compressed module sources ###
+# Each entry in module_sources is (module name, source that has been
+# zlib-compressed and then base64-encoded). Decode each source, exec it into
+# a fresh module object, and register the result in sys.modules so the
+# bundled modules are importable as if they were installed.
+import sys, imp
+for name, source in module_sources:
+    source = source.decode("base64").decode("zlib")
+    mod = imp.new_module(name)
+    exec source in mod.__dict__
+    sys.modules[name] = mod
+
+### Original script follows ###
+#!/usr/bin/python
+"""%prog [-p|--props-file] [-r|--rev revision] [-b|--branch branch]
+ [-s|--shared-dir shared_dir] repo [dest]
+
+Tool to do safe operations with git.
+
+revision/branch on commandline will override those in props-file"""
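+
+# A hypothetical minimal properties file for -p/--props-file; the script only
+# reads js['sourcestamp']['revision'] and js['sourcestamp']['branch']:
+#   {"sourcestamp": {"revision": "0123456789ab", "branch": "default"}}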
+
+# Import snippet to find tools lib
+import os
+import site
+import logging
+site.addsitedir(os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ "../../lib/python"))
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+from util.git import git
+
+
+if __name__ == '__main__':
+ from optparse import OptionParser
+
+ parser = OptionParser(__doc__)
+ parser.set_defaults(
+ revision=os.environ.get('GIT_REV'),
+ branch=os.environ.get('GIT_BRANCH', None),
+ propsfile=os.environ.get('PROPERTIES_FILE'),
+ loglevel=logging.INFO,
+ shared_dir=os.environ.get('GIT_SHARE_BASE_DIR'),
+ mirrors=None,
+ clean=False,
+ )
+ parser.add_option(
+ "-r", "--rev", dest="revision", help="which revision to update to")
+ parser.add_option(
+ "-b", "--branch", dest="branch", help="which branch to update to")
+ parser.add_option("-p", "--props-file", dest="propsfile",
+ help="build json file containing revision information")
+ parser.add_option("-s", "--shared-dir", dest="shared_dir",
+ help="clone to a shared directory")
+ parser.add_option("--mirror", dest="mirrors", action="append",
+ help="add a mirror to try cloning/pulling from before repo")
+ parser.add_option("--clean", dest="clean", action="store_true", default=False,
+ help="run 'git clean' after updating the local repository")
+ parser.add_option("-v", "--verbose", dest="loglevel",
+ action="store_const", const=logging.DEBUG)
+
+ options, args = parser.parse_args()
+
+ logging.basicConfig(
+ level=options.loglevel, format="%(asctime)s %(message)s")
+
+ if len(args) not in (1, 2):
+ parser.error("Invalid number of arguments")
+
+ repo = args[0]
+ if len(args) == 2:
+ dest = args[1]
+ else:
+ dest = os.path.basename(repo)
+
+ # Parse propsfile
+ if options.propsfile:
+ js = json.load(open(options.propsfile))
+ if options.revision is None:
+ options.revision = js['sourcestamp']['revision']
+ if options.branch is None:
+ options.branch = js['sourcestamp']['branch']
+
+ got_revision = git(repo, dest, options.branch, options.revision,
+ shareBase=options.shared_dir,
+ mirrors=options.mirrors,
+ clean_dest=options.clean,
+ )
+
+ print "Got revision %s" % got_revision
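+
+# Example invocation (a sketch; the URL and paths are illustrative only):
+#   python gittool.py -r 0123456789ab -s /builds/git-shared \
+#       https://example.com/project.git project-checkout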
diff --git a/testing/mozharness/external_tools/machine-configuration.json b/testing/mozharness/external_tools/machine-configuration.json
new file mode 100644
index 000000000..29118c0fd
--- /dev/null
+++ b/testing/mozharness/external_tools/machine-configuration.json
@@ -0,0 +1,12 @@
+{
+ "win7": {
+ "screen_resolution": {
+ "x": 1280,
+ "y": 1024
+ },
+ "mouse_position": {
+ "x": 1010,
+ "y": 10
+ }
+ }
+}
diff --git a/testing/mozharness/external_tools/mouse_and_screen_resolution.py b/testing/mozharness/external_tools/mouse_and_screen_resolution.py
new file mode 100755
index 000000000..29e46e1bc
--- /dev/null
+++ b/testing/mozharness/external_tools/mouse_and_screen_resolution.py
@@ -0,0 +1,153 @@
+#! /usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# Script name: mouse_and_screen_resolution.py
+# Purpose: Sets mouse position and screen resolution for Windows 7 32-bit slaves
+# Author(s): Zambrano Gasparnian, Armen <armenzg@mozilla.com>
+# Target: Python 2.5 or newer
+#
+from optparse import OptionParser
+from ctypes import windll, Structure, c_ulong, byref
+try:
+ import json
+except ImportError:
+ import simplejson as json
+import os
+import sys
+import urllib2
+import socket
+import platform
+import time
+
+default_screen_resolution = {"x": 1024, "y": 768}
+default_mouse_position = {"x": 1010, "y": 10}
+
+def wfetch(url, retries=5):
+ while True:
+ try:
+ return urllib2.urlopen(url, timeout=30).read()
+ except urllib2.HTTPError, e:
+ print("Failed to fetch '%s': %s" % (url, str(e)))
+ except urllib2.URLError, e:
+ print("Failed to fetch '%s': %s" % (url, str(e)))
+ except socket.timeout, e:
+ print("Time out accessing %s: %s" % (url, str(e)))
+ except socket.error, e:
+ print("Socket error when accessing %s: %s" % (url, str(e)))
+ if retries < 0:
+ raise Exception("Could not fetch url '%s'" % url)
+ retries -= 1
+ print("Retrying")
+ time.sleep(60)
+
+def main():
+
+    # platform.version() starts with 6.1.760x on Windows 7 (7600 = RTM,
+    # 7601 = SP1), and PROGRAMFILES(X86) is only set on 64-bit Windows.
+    if not (platform.version().startswith('6.1.760') and 'PROGRAMFILES(X86)' not in os.environ):
+        # We only want to run this for Windows 7 32-bit
+        print "INFO: This script was written to be used with Windows 7 32-bit machines."
+        return 0
+
+ parser = OptionParser()
+ parser.add_option(
+ "--configuration-url", dest="configuration_url", type="string",
+ help="Specifies the url of the configuration file.")
+ parser.add_option(
+ "--configuration-file", dest="configuration_file", type="string",
+ help="Specifies the path to the configuration file.")
+ (options, args) = parser.parse_args()
+
+    if (options.configuration_url is None and
+            options.configuration_file is None):
+ print "You must specify --configuration-url or --configuration-file."
+ return 1
+
+ if options.configuration_file:
+ with open(options.configuration_file) as f:
+ conf_dict = json.load(f)
+ new_screen_resolution = conf_dict["win7"]["screen_resolution"]
+ new_mouse_position = conf_dict["win7"]["mouse_position"]
+ else:
+ try:
+ conf_dict = json.loads(wfetch(options.configuration_url))
+ new_screen_resolution = conf_dict["win7"]["screen_resolution"]
+ new_mouse_position = conf_dict["win7"]["mouse_position"]
+ except urllib2.HTTPError, e:
+ print "This branch does not seem to have the configuration file %s" % str(e)
+ print "Let's fail over to 1024x768."
+ new_screen_resolution = default_screen_resolution
+ new_mouse_position = default_mouse_position
+ except urllib2.URLError, e:
+ print "INFRA-ERROR: We couldn't reach hg.mozilla.org: %s" % str(e)
+ return 1
+ except Exception, e:
+ print "ERROR: We were not expecting any more exceptions: %s" % str(e)
+ return 1
+
+ current_screen_resolution = queryScreenResolution()
+ print "Screen resolution (current): (%(x)s, %(y)s)" % (current_screen_resolution)
+
+ if current_screen_resolution == new_screen_resolution:
+ print "No need to change the screen resolution."
+ else:
+ print "Changing the screen resolution..."
+ try:
+ changeScreenResolution(new_screen_resolution["x"], new_screen_resolution["y"])
+ except Exception, e:
+ print "INFRA-ERROR: We have attempted to change the screen resolution but " + \
+ "something went wrong: %s" % str(e)
+ return 1
+ time.sleep(3) # just in case
+ current_screen_resolution = queryScreenResolution()
+ print "Screen resolution (new): (%(x)s, %(y)s)" % current_screen_resolution
+
+ print "Mouse position (current): (%(x)s, %(y)s)" % (queryMousePosition())
+ setCursorPos(new_mouse_position["x"], new_mouse_position["y"])
+ current_mouse_position = queryMousePosition()
+ print "Mouse position (new): (%(x)s, %(y)s)" % (current_mouse_position)
+
+ if current_screen_resolution != new_screen_resolution or current_mouse_position != new_mouse_position:
+        print "INFRA-ERROR: The new screen resolution or mouse position is not what we expected"
+ return 1
+ else:
+ return 0
+
+class POINT(Structure):
+ _fields_ = [("x", c_ulong), ("y", c_ulong)]
+
+def queryMousePosition():
+ pt = POINT()
+ windll.user32.GetCursorPos(byref(pt))
+ return { "x": pt.x, "y": pt.y}
+
+def setCursorPos(x, y):
+ windll.user32.SetCursorPos(x, y)
+
+def queryScreenResolution():
+    # GetSystemMetrics(0) and GetSystemMetrics(1) are SM_CXSCREEN and
+    # SM_CYSCREEN: the width and height of the primary display in pixels.
+    return {"x": windll.user32.GetSystemMetrics(0),
+            "y": windll.user32.GetSystemMetrics(1)}
+
+def changeScreenResolution(xres = None, yres = None, BitsPerPixel = None):
+    import struct
+
+    DM_BITSPERPEL = 0x00040000
+    DM_PELSWIDTH = 0x00080000
+    DM_PELSHEIGHT = 0x00100000
+    CDS_FULLSCREEN = 0x00000004
+    SIZEOF_DEVMODE = 148
+
+    # Hand-build a DEVMODEA structure as a byte string: zero everything,
+    # then fill in dmSize, the dmFields bitmask, and the bits-per-pixel,
+    # width, and height members that ChangeDisplaySettingsA should apply.
+    DevModeData = struct.calcsize("32BHH") * '\x00'
+    DevModeData += struct.pack("H", SIZEOF_DEVMODE)
+    DevModeData += struct.calcsize("H") * '\x00'
+    dwFields = (xres and DM_PELSWIDTH or 0) | (yres and DM_PELSHEIGHT or 0) | (BitsPerPixel and DM_BITSPERPEL or 0)
+    DevModeData += struct.pack("L", dwFields)
+    DevModeData += struct.calcsize("l9h32BHL") * '\x00'
+    DevModeData += struct.pack("LLL", BitsPerPixel or 0, xres or 0, yres or 0)
+    DevModeData += struct.calcsize("8L") * '\x00'
+
+    # Returns DISP_CHANGE_SUCCESSFUL (0) on success.
+    return windll.user32.ChangeDisplaySettingsA(DevModeData, 0)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/testing/mozharness/external_tools/performance-artifact-schema.json b/testing/mozharness/external_tools/performance-artifact-schema.json
new file mode 100644
index 000000000..f79a0419b
--- /dev/null
+++ b/testing/mozharness/external_tools/performance-artifact-schema.json
@@ -0,0 +1,164 @@
+{
+ "definitions": {
+ "framework_schema": {
+ "properties": {
+ "name": {
+ "title": "Framework name",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ },
+ "subtest_schema": {
+ "properties": {
+ "name": {
+ "title": "Subtest name",
+ "type": "string"
+ },
+ "value": {
+ "description": "Summary value for subtest",
+ "title": "Subtest value",
+ "type": "number",
+ "minimum": -1000000000000.0,
+ "maximum": 1000000000000.0
+ },
+ "lowerIsBetter": {
+ "description": "Whether lower values are better for subtest",
+ "title": "Lower is better",
+ "type": "boolean"
+ },
+ "shouldAlert": {
+ "description": "Whether we should alert",
+ "title": "Should alert",
+ "type": "boolean"
+ },
+ "alertThreshold": {
+ "description": "% change threshold before alerting",
+ "title": "Alert threshold",
+ "type": "number",
+ "minimum": 0.0,
+ "maximum": 1000.0
+ },
+ "minBackWindow": {
+ "description": "Minimum back window to use for alerting",
+ "title": "Minimum back window",
+ "type": "number",
+ "minimum": 1,
+ "maximum": 255
+ },
+ "maxBackWindow": {
+ "description": "Maximum back window to use for alerting",
+ "title": "Maximum back window",
+ "type": "number",
+ "minimum": 1,
+ "maximum": 255
+ },
+ "foreWindow": {
+ "description": "Fore window to use for alerting",
+ "title": "Fore window",
+ "type": "number",
+ "minimum": 1,
+ "maximum": 255
+ }
+ },
+ "required": [
+ "name",
+ "value"
+ ],
+ "type": "object"
+ },
+ "suite_schema": {
+ "properties": {
+ "name": {
+ "title": "Suite name",
+ "type": "string"
+ },
+ "extraOptions": {
+ "type": "array",
+ "title": "Extra options used in running suite",
+ "items": {
+ "type": "string"
+ },
+ "uniqueItems": true
+ },
+ "subtests": {
+ "items": {
+ "$ref": "#/definitions/subtest_schema"
+ },
+ "title": "Subtests",
+ "type": "array"
+ },
+ "value": {
+ "title": "Suite value",
+ "type": "number",
+ "minimum": -1000000000000.0,
+ "maximum": 1000000000000.0
+ },
+ "lowerIsBetter": {
+ "description": "Whether lower values are better for suite",
+ "title": "Lower is better",
+ "type": "boolean"
+ },
+ "shouldAlert": {
+ "description": "Whether we should alert on this suite (overrides default behaviour)",
+ "title": "Should alert",
+ "type": "boolean"
+ },
+ "alertThreshold": {
+ "description": "% change threshold before alerting",
+ "title": "Alert threshold",
+ "type": "number",
+ "minimum": 0.0,
+ "maximum": 1000.0
+ },
+ "minBackWindow": {
+ "description": "Minimum back window to use for alerting",
+ "title": "Minimum back window",
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 255
+ },
+ "maxBackWindow": {
+ "description": "Maximum back window to use for alerting",
+ "title": "Maximum back window",
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 255
+ },
+ "foreWindow": {
+ "description": "Fore window to use for alerting",
+ "title": "Fore window",
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 255
+ }
+ },
+ "required": [
+ "name",
+ "subtests"
+ ],
+ "type": "object"
+ }
+ },
+ "description": "Structure for submitting performance data as part of a job",
+ "id": "https://treeherder.mozilla.org/schemas/v1/performance-artifact.json#",
+ "properties": {
+ "framework": {
+ "$ref": "#/definitions/framework_schema"
+ },
+ "suites": {
+ "description": "List of suite-level data submitted as part of this structure",
+ "items": {
+ "$ref": "#/definitions/suite_schema"
+ },
+ "title": "Performance suites",
+ "type": "array"
+ }
+ },
+ "required": [
+ "framework",
+ "suites"
+ ],
+ "title": "Perfherder Schema",
+ "type": "object"
+}
diff --git a/testing/mozharness/external_tools/robustcheckout.py b/testing/mozharness/external_tools/robustcheckout.py
new file mode 100644
index 000000000..e801724c1
--- /dev/null
+++ b/testing/mozharness/external_tools/robustcheckout.py
@@ -0,0 +1,451 @@
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""Robustly perform a checkout.
+
+This extension provides the ``hg robustcheckout`` command for
+ensuring a working directory is updated to the specified revision
+from a source repo using best practices to ensure optimal clone
+times and storage efficiency.
+"""
+
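+# A sketch of typical usage (URL, revision, and paths are illustrative only):
+#
+#   hg robustcheckout --sharebase /builds/hg-shared \
+#       --revision 0123456789ab https://hg.example.org/repo /builds/checkout
+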
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import functools
+import os
+import random
+import re
+import socket
+import ssl
+import time
+import urllib2
+
+from mercurial.i18n import _
+from mercurial.node import hex
+from mercurial import (
+ commands,
+ error,
+ exchange,
+ extensions,
+ cmdutil,
+ hg,
+ registrar,
+ scmutil,
+ util,
+)
+
+testedwith = '3.7 3.8 3.9 4.0 4.1 4.2 4.3'
+minimumhgversion = '3.7'
+
+cmdtable = {}
+
+# Mercurial 4.3 introduced registrar.command as a replacement for
+# cmdutil.command.
+if util.safehasattr(registrar, 'command'):
+ command = registrar.command(cmdtable)
+else:
+ command = cmdutil.command(cmdtable)
+
+# Mercurial 4.2 introduced the vfs module and deprecated the symbol in
+# scmutil.
+def getvfs():
+ try:
+ from mercurial.vfs import vfs
+ return vfs
+ except ImportError:
+ return scmutil.vfs
+
+
+if os.name == 'nt':
+ import ctypes
+
+ # Get a reference to the DeleteFileW function
+ # DeleteFileW accepts filenames encoded as a null terminated sequence of
+ # wide chars (UTF-16). Python's ctypes.c_wchar_p correctly encodes unicode
+ # strings to null terminated UTF-16 strings.
+ # However, we receive (byte) strings from mercurial. When these are passed
+ # to DeleteFileW via the c_wchar_p type, they are implicitly decoded via
+ # the 'mbcs' encoding on windows.
+ kernel32 = ctypes.windll.kernel32
+ DeleteFile = kernel32.DeleteFileW
+ DeleteFile.argtypes = [ctypes.c_wchar_p]
+ DeleteFile.restype = ctypes.c_bool
+
+ def unlinklong(fn):
+ normalized_path = '\\\\?\\' + os.path.normpath(fn)
+ if not DeleteFile(normalized_path):
+ raise OSError(errno.EPERM, "couldn't remove long path", fn)
+
+# Not needed on other platforms, but is handy for testing
+else:
+ def unlinklong(fn):
+ os.unlink(fn)
+
+
+def unlinkwrapper(unlinkorig, fn, ui):
+ '''Calls unlink_long if original unlink function fails.'''
+ try:
+ ui.debug('calling unlink_orig %s\n' % fn)
+ return unlinkorig(fn)
+ except WindowsError as e:
+ # Windows error 3 corresponds to ERROR_PATH_NOT_FOUND
+ # only handle this case; re-raise the exception for other kinds of
+ # failures.
+ if e.winerror != 3:
+ raise
+ ui.debug('caught WindowsError ERROR_PATH_NOT_FOUND; '
+ 'calling unlink_long %s\n' % fn)
+ return unlinklong(fn)
+
+
+@contextlib.contextmanager
+def wrapunlink(ui):
+ '''Context manager that temporarily monkeypatches unlink functions.'''
+ purgemod = extensions.find('purge')
+ to_wrap = [(purgemod.util, 'unlink')]
+
+ # Pass along the ui object to the unlink_wrapper so we can get logging out
+ # of it.
+ wrapped = functools.partial(unlinkwrapper, ui=ui)
+
+ # Wrap the original function(s) with our unlink wrapper.
+ originals = {}
+ for mod, func in to_wrap:
+ ui.debug('wrapping %s %s\n' % (mod, func))
+ originals[mod, func] = extensions.wrapfunction(mod, func, wrapped)
+
+ try:
+ yield
+ finally:
+ # Restore the originals.
+ for mod, func in to_wrap:
+ ui.debug('restoring %s %s\n' % (mod, func))
+ setattr(mod, func, originals[mod, func])
+
+
+def purgewrapper(orig, ui, *args, **kwargs):
+ '''Runs original purge() command with unlink monkeypatched.'''
+ with wrapunlink(ui):
+ return orig(ui, *args, **kwargs)
+
+
+@command('robustcheckout', [
+ ('', 'upstream', '', 'URL of upstream repo to clone from'),
+ ('r', 'revision', '', 'Revision to check out'),
+ ('b', 'branch', '', 'Branch to check out'),
+ ('', 'purge', False, 'Whether to purge the working directory'),
+ ('', 'sharebase', '', 'Directory where shared repos should be placed'),
+ ('', 'networkattempts', 3, 'Maximum number of attempts for network '
+ 'operations'),
+ ],
+ '[OPTION]... URL DEST',
+ norepo=True)
+def robustcheckout(ui, url, dest, upstream=None, revision=None, branch=None,
+ purge=False, sharebase=None, networkattempts=None):
+ """Ensure a working copy has the specified revision checked out."""
+ if not revision and not branch:
+ raise error.Abort('must specify one of --revision or --branch')
+
+ if revision and branch:
+ raise error.Abort('cannot specify both --revision and --branch')
+
+ # Require revision to look like a SHA-1.
+ if revision:
+ if len(revision) < 12 or len(revision) > 40 or not re.match('^[a-f0-9]+$', revision):
+ raise error.Abort('--revision must be a SHA-1 fragment 12-40 '
+ 'characters long')
+
+ sharebase = sharebase or ui.config('share', 'pool')
+ if not sharebase:
+ raise error.Abort('share base directory not defined; refusing to operate',
+ hint='define share.pool config option or pass --sharebase')
+
+ # worker.backgroundclose only makes things faster if running anti-virus,
+ # which our automation doesn't. Disable it.
+ ui.setconfig('worker', 'backgroundclose', False)
+
+ # By default the progress bar starts after 3s and updates every 0.1s. We
+ # change this so it shows and updates every 1.0s.
+ # We also tell progress to assume a TTY is present so updates are printed
+ # even if there is no known TTY.
+ # We make the config change here instead of in a config file because
+ # otherwise we're at the whim of whatever configs are used in automation.
+ ui.setconfig('progress', 'delay', 1.0)
+ ui.setconfig('progress', 'refresh', 1.0)
+ ui.setconfig('progress', 'assume-tty', True)
+
+ sharebase = os.path.realpath(sharebase)
+
+ return _docheckout(ui, url, dest, upstream, revision, branch, purge,
+ sharebase, networkattempts)
+
+def _docheckout(ui, url, dest, upstream, revision, branch, purge, sharebase,
+ networkattemptlimit, networkattempts=None):
+    if not networkattempts:
+        # Use a one-element list so the retry counter is shared, by reference,
+        # across recursive callself() invocations.
+        networkattempts = [1]
+
+ def callself():
+ return _docheckout(ui, url, dest, upstream, revision, branch, purge,
+ sharebase, networkattemptlimit, networkattempts)
+
+ ui.write('ensuring %s@%s is available at %s\n' % (url, revision or branch,
+ dest))
+
+ # We assume that we're the only process on the machine touching the
+ # repository paths that we were told to use. This means our recovery
+ # scenario when things aren't "right" is to just nuke things and start
+ # from scratch. This is easier to implement than verifying the state
+ # of the data and attempting recovery. And in some scenarios (such as
+ # potential repo corruption), it is probably faster, since verifying
+ # repos can take a while.
+
+ destvfs = getvfs()(dest, audit=False, realpath=True)
+
+ def deletesharedstore(path=None):
+ storepath = path or destvfs.read('.hg/sharedpath').strip()
+ if storepath.endswith('.hg'):
+ storepath = os.path.dirname(storepath)
+
+ storevfs = getvfs()(storepath, audit=False)
+ storevfs.rmtree(forcibly=True)
+
+ if destvfs.exists() and not destvfs.exists('.hg'):
+ raise error.Abort('destination exists but no .hg directory')
+
+    # Require checkouts to be tied to shared storage, for efficiency.
+ if destvfs.exists('.hg') and not destvfs.exists('.hg/sharedpath'):
+ ui.warn('(destination is not shared; deleting)\n')
+ destvfs.rmtree(forcibly=True)
+
+ # Verify the shared path exists and is using modern pooled storage.
+ if destvfs.exists('.hg/sharedpath'):
+ storepath = destvfs.read('.hg/sharedpath').strip()
+
+ ui.write('(existing repository shared store: %s)\n' % storepath)
+
+ if not os.path.exists(storepath):
+ ui.warn('(shared store does not exist; deleting destination)\n')
+ destvfs.rmtree(forcibly=True)
+ elif not re.search('[a-f0-9]{40}/\.hg$', storepath.replace('\\', '/')):
+ ui.warn('(shared store does not belong to pooled storage; '
+ 'deleting destination to improve efficiency)\n')
+ destvfs.rmtree(forcibly=True)
+
+ storevfs = getvfs()(storepath, audit=False)
+ if storevfs.isfileorlink('store/lock'):
+ ui.warn('(shared store has an active lock; assuming it is left '
+ 'over from a previous process and that the store is '
+ 'corrupt; deleting store and destination just to be '
+ 'sure)\n')
+ destvfs.rmtree(forcibly=True)
+ deletesharedstore(storepath)
+
+ # FUTURE when we require generaldelta, this is where we can check
+ # for that.
+
+ if destvfs.isfileorlink('.hg/wlock'):
+ ui.warn('(dest has an active working directory lock; assuming it is '
+ 'left over from a previous process and that the destination '
+ 'is corrupt; deleting it just to be sure)\n')
+ destvfs.rmtree(forcibly=True)
+
+ def handlerepoerror(e):
+ if e.message == _('abandoned transaction found'):
+ ui.warn('(abandoned transaction found; trying to recover)\n')
+ repo = hg.repository(ui, dest)
+ if not repo.recover():
+ ui.warn('(could not recover repo state; '
+ 'deleting shared store)\n')
+ deletesharedstore()
+
+ ui.warn('(attempting checkout from beginning)\n')
+ return callself()
+
+ raise
+
+ # At this point we either have an existing working directory using
+ # shared, pooled storage or we have nothing.
+
+ def handlenetworkfailure():
+ if networkattempts[0] >= networkattemptlimit:
+ raise error.Abort('reached maximum number of network attempts; '
+ 'giving up\n')
+
+ ui.warn('(retrying after network failure on attempt %d of %d)\n' %
+ (networkattempts[0], networkattemptlimit))
+
+ # Do a backoff on retries to mitigate the thundering herd
+        # problem. This is an exponential backoff with a multiplier
+ # plus random jitter thrown in for good measure.
+ # With the default settings, backoffs will be:
+ # 1) 2.5 - 6.5
+ # 2) 5.5 - 9.5
+ # 3) 11.5 - 15.5
+ backoff = (2 ** networkattempts[0] - 1) * 1.5
+ jittermin = ui.configint('robustcheckout', 'retryjittermin', 1000)
+ jittermax = ui.configint('robustcheckout', 'retryjittermax', 5000)
+ backoff += float(random.randint(jittermin, jittermax)) / 1000.0
+ ui.warn('(waiting %.2fs before retry)\n' % backoff)
+ time.sleep(backoff)
+
+ networkattempts[0] += 1
+
+ def handlepullerror(e):
+ """Handle an exception raised during a pull.
+
+ Returns True if caller should call ``callself()`` to retry.
+ """
+ if isinstance(e, error.Abort):
+ if e.args[0] == _('repository is unrelated'):
+ ui.warn('(repository is unrelated; deleting)\n')
+ destvfs.rmtree(forcibly=True)
+ return True
+ elif e.args[0].startswith(_('stream ended unexpectedly')):
+ ui.warn('%s\n' % e.args[0])
+ # Will raise if failure limit reached.
+ handlenetworkfailure()
+ return True
+ elif isinstance(e, ssl.SSLError):
+ # Assume all SSL errors are due to the network, as Mercurial
+ # should convert non-transport errors like cert validation failures
+ # to error.Abort.
+ ui.warn('ssl error: %s\n' % e)
+ handlenetworkfailure()
+ return True
+ elif isinstance(e, urllib2.URLError):
+ if isinstance(e.reason, socket.error):
+ ui.warn('socket error: %s\n' % e.reason)
+ handlenetworkfailure()
+ return True
+
+ return False
+
+ created = False
+
+ if not destvfs.exists():
+ # Ensure parent directories of destination exist.
+ # Mercurial 3.8 removed ensuredirs and made makedirs race safe.
+ if util.safehasattr(util, 'ensuredirs'):
+ makedirs = util.ensuredirs
+ else:
+ makedirs = util.makedirs
+
+ makedirs(os.path.dirname(destvfs.base), notindexed=True)
+ makedirs(sharebase, notindexed=True)
+
+ if upstream:
+ ui.write('(cloning from upstream repo %s)\n' % upstream)
+ cloneurl = upstream or url
+
+ try:
+ res = hg.clone(ui, {}, cloneurl, dest=dest, update=False,
+ shareopts={'pool': sharebase, 'mode': 'identity'})
+ except (error.Abort, ssl.SSLError, urllib2.URLError) as e:
+ if handlepullerror(e):
+ return callself()
+ raise
+ except error.RepoError as e:
+ return handlerepoerror(e)
+ except error.RevlogError as e:
+ ui.warn('(repo corruption: %s; deleting shared store)\n' % e.message)
+ deletesharedstore()
+ return callself()
+
+ # TODO retry here.
+ if res is None:
+ raise error.Abort('clone failed')
+
+ # Verify it is using shared pool storage.
+ if not destvfs.exists('.hg/sharedpath'):
+ raise error.Abort('clone did not create a shared repo')
+
+ created = True
+
+ # The destination .hg directory should exist. Now make sure we have the
+ # wanted revision.
+
+ repo = hg.repository(ui, dest)
+
+ # We only pull if we are using symbolic names or the requested revision
+ # doesn't exist.
+ havewantedrev = False
+ if revision and revision in repo:
+ ctx = repo[revision]
+
+ if not ctx.hex().startswith(revision):
+ raise error.Abort('--revision argument is ambiguous',
+ hint='must be the first 12+ characters of a '
+ 'SHA-1 fragment')
+
+ checkoutrevision = ctx.hex()
+ havewantedrev = True
+
+ if not havewantedrev:
+ ui.write('(pulling to obtain %s)\n' % (revision or branch,))
+
+ remote = None
+ try:
+ remote = hg.peer(repo, {}, url)
+ pullrevs = [remote.lookup(revision or branch)]
+ checkoutrevision = hex(pullrevs[0])
+ if branch:
+ ui.warn('(remote resolved %s to %s; '
+ 'result is not deterministic)\n' %
+ (branch, checkoutrevision))
+
+ if checkoutrevision in repo:
+ ui.warn('(revision already present locally; not pulling)\n')
+ else:
+ pullop = exchange.pull(repo, remote, heads=pullrevs)
+ if not pullop.rheads:
+ raise error.Abort('unable to pull requested revision')
+ except (error.Abort, ssl.SSLError, urllib2.URLError) as e:
+ if handlepullerror(e):
+ return callself()
+ raise
+ except error.RepoError as e:
+ return handlerepoerror(e)
+ except error.RevlogError as e:
+ ui.warn('(repo corruption: %s; deleting shared store)\n' % e.message)
+ deletesharedstore()
+ return callself()
+ finally:
+ if remote:
+ remote.close()
+
+ # Now we should have the wanted revision in the store. Perform
+ # working directory manipulation.
+
+ # Purge if requested. We purge before update because this way we're
+ # guaranteed to not have conflicts on `hg update`.
+ if purge and not created:
+ ui.write('(purging working directory)\n')
+ purgeext = extensions.find('purge')
+
+ if purgeext.purge(ui, repo, all=True, abort_on_err=True,
+ # The function expects all arguments to be
+ # defined.
+ **{'print': None, 'print0': None, 'dirs': None,
+ 'files': None}):
+ raise error.Abort('error purging')
+
+ # Update the working directory.
+ if commands.update(ui, repo, rev=checkoutrevision, clean=True):
+ raise error.Abort('error updating')
+
+ ui.write('updated to %s\n' % checkoutrevision)
+ return None
+
+
+def extsetup(ui):
+ # Ensure required extensions are loaded.
+ for ext in ('purge', 'share'):
+ try:
+ extensions.find(ext)
+ except KeyError:
+ extensions.load(ui, ext, None)
+
+ purgemod = extensions.find('purge')
+ extensions.wrapcommand(purgemod.cmdtable, 'purge', purgewrapper)
diff --git a/testing/mozharness/external_tools/virtualenv/AUTHORS.txt b/testing/mozharness/external_tools/virtualenv/AUTHORS.txt
new file mode 100644
index 000000000..272494163
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/AUTHORS.txt
@@ -0,0 +1,91 @@
+Author
+------
+
+Ian Bicking
+
+Maintainers
+-----------
+
+Brian Rosner
+Carl Meyer
+Jannis Leidel
+Paul Moore
+Paul Nasrat
+Marcus Smith
+
+Contributors
+------------
+
+Alex Grönholm
+Anatoly Techtonik
+Antonio Cuni
+Antonio Valentino
+Armin Ronacher
+Barry Warsaw
+Benjamin Root
+Bradley Ayers
+Branden Rolston
+Brandon Carl
+Brian Kearns
+Cap Petschulat
+CBWhiz
+Chris Adams
+Chris McDonough
+Christos Kontas
+Christian Hudon
+Christian Stefanescu
+Christopher Nilsson
+Cliff Xuan
+Curt Micol
+Damien Nozay
+Dan Sully
+Daniel Hahler
+Daniel Holth
+David Schoonover
+Denis Costa
+Doug Hellmann
+Doug Napoleone
+Douglas Creager
+Eduard-Cristian Stefan
+Erik M. Bray
+Ethan Jucovy
+Gabriel de Perthuis
+Gunnlaugur Thor Briem
+Graham Dennis
+Greg Haskins
+Jason Penney
+Jason R. Coombs
+Jeff Hammel
+Jeremy Orem
+John Kleint
+Jonathan Griffin
+Jonathan Hitchcock
+Jorge Vargas
+Josh Bronson
+Kamil Kisiel
+Kyle Gibson
+Konstantin Zemlyak
+Kumar McMillan
+Lars Francke
+Marc Abramowitz
+Mika Laitio
+Mike Hommey
+Miki Tebeka
+Philip Jenvey
+Philippe Ombredanne
+Piotr Dobrogost
+Preston Holmes
+Ralf Schmitt
+Raul Leal
+Ronny Pfannschmidt
+Satrajit Ghosh
+Sergio de Carvalho
+Stefano Rivera
+Tarek Ziadé
+Thomas Aglassinger
+Vinay Sajip
+Vitaly Babiy
+Vladimir Rutsky
+Wang Xuerui \ No newline at end of file
diff --git a/testing/mozharness/external_tools/virtualenv/LICENSE.txt b/testing/mozharness/external_tools/virtualenv/LICENSE.txt
new file mode 100644
index 000000000..ab145001f
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/LICENSE.txt
@@ -0,0 +1,22 @@
+Copyright (c) 2007 Ian Bicking and Contributors
+Copyright (c) 2009 Ian Bicking, The Open Planning Project
+Copyright (c) 2011-2016 The virtualenv developers
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/testing/mozharness/external_tools/virtualenv/MANIFEST.in b/testing/mozharness/external_tools/virtualenv/MANIFEST.in
new file mode 100644
index 000000000..49037ada6
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/MANIFEST.in
@@ -0,0 +1,12 @@
+recursive-include docs *
+recursive-include tests *.py *.sh *.expected
+recursive-include virtualenv_support *.whl
+recursive-include virtualenv_embedded *
+recursive-exclude docs/_templates *
+recursive-exclude docs/_build *
+include virtualenv_support/__init__.py
+include bin/*
+include scripts/*
+include *.py
+include AUTHORS.txt
+include LICENSE.txt
diff --git a/testing/mozharness/external_tools/virtualenv/PKG-INFO b/testing/mozharness/external_tools/virtualenv/PKG-INFO
new file mode 100644
index 000000000..dbfda645d
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/PKG-INFO
@@ -0,0 +1,87 @@
+Metadata-Version: 1.1
+Name: virtualenv
+Version: 15.0.1
+Summary: Virtual Python Environment builder
+Home-page: https://virtualenv.pypa.io/
+Author: Jannis Leidel, Carl Meyer and Brian Rosner
+Author-email: python-virtualenv@groups.google.com
+License: MIT
+Description: Virtualenv
+ ==========
+
+ `Mailing list <http://groups.google.com/group/python-virtualenv>`_ |
+ `Issues <https://github.com/pypa/virtualenv/issues>`_ |
+ `Github <https://github.com/pypa/virtualenv>`_ |
+ `PyPI <https://pypi.python.org/pypi/virtualenv/>`_ |
+ User IRC: #pypa
+ Dev IRC: #pypa-dev
+
+ Introduction
+ ------------
+
+ ``virtualenv`` is a tool to create isolated Python environments.
+
+ The basic problem being addressed is one of dependencies and versions,
+ and indirectly permissions. Imagine you have an application that
+ needs version 1 of LibFoo, but another application requires version
+ 2. How can you use both these applications? If you install
+ everything into ``/usr/lib/python2.7/site-packages`` (or whatever your
+ platform's standard location is), it's easy to end up in a situation
+ where you unintentionally upgrade an application that shouldn't be
+ upgraded.
+
+ Or more generally, what if you want to install an application *and
+ leave it be*? If an application works, any change in its libraries or
+ the versions of those libraries can break the application.
+
+ Also, what if you can't install packages into the global
+ ``site-packages`` directory? For instance, on a shared host.
+
+ In all these cases, ``virtualenv`` can help you. It creates an
+ environment that has its own installation directories, that doesn't
+ share libraries with other virtualenv environments (and optionally
+ doesn't access the globally installed libraries either).
+
+ .. comment:
+
+ Release History
+ ===============
+
+ 15.0.1 (2016-03-17)
+ -------------------
+
+ * Print error message when DEST_DIR exists and is a file
+
+ * Upgrade setuptools to 20.3
+
+ * Upgrade pip to 8.1.1.
+
+
+ 15.0.0 (2016-03-05)
+ -------------------
+
+ * Remove the `virtualenv-N.N` script from the package; this can no longer be
+ correctly created from a wheel installation.
+ Resolves #851, #692
+
+ * Remove accidental runtime dependency on pip by extracting certificate in the
+ subprocess.
+
+        * Upgrade setuptools to 20.2.2.
+
+ * Upgrade pip to 8.1.0.
+
+
+ `Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.
+Keywords: setuptools deployment installation distutils
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/testing/mozharness/external_tools/virtualenv/README.rst b/testing/mozharness/external_tools/virtualenv/README.rst
new file mode 100644
index 000000000..0d5984dce
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/README.rst
@@ -0,0 +1,31 @@
+virtualenv
+==========
+
+A tool for creating isolated 'virtual' Python environments.
+
+.. image:: https://img.shields.io/pypi/v/virtualenv.svg
+ :target: https://pypi.python.org/pypi/virtualenv
+
+.. image:: https://img.shields.io/travis/pypa/virtualenv/develop.svg
+ :target: http://travis-ci.org/pypa/virtualenv
+
+* `Installation <https://virtualenv.pypa.io/en/latest/installation.html>`_
+* `Documentation <https://virtualenv.pypa.io/>`_
+* `Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_
+* `Issues <https://github.com/pypa/virtualenv/issues>`_
+* `PyPI <https://pypi.python.org/pypi/virtualenv/>`_
+* `Github <https://github.com/pypa/virtualenv>`_
+* `User mailing list <http://groups.google.com/group/python-virtualenv>`_
+* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_
+* User IRC: #pypa on Freenode.
+* Dev IRC: #pypa-dev on Freenode.
+
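+Quickstart
+----------
+
+A minimal sketch of typical use (``myenv`` is an arbitrary name; any package
+can stand in for ``<some-package>``)::
+
+    $ virtualenv myenv
+    $ source myenv/bin/activate
+    (myenv) $ pip install <some-package>
+    (myenv) $ deactivate
+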
+
+Code of Conduct
+---------------
+
+Everyone interacting in the virtualenv project's codebases, issue trackers,
+chat rooms, and mailing lists is expected to follow the
+`PyPA Code of Conduct`_.
+
+.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
diff --git a/testing/mozharness/external_tools/virtualenv/bin/rebuild-script.py b/testing/mozharness/external_tools/virtualenv/bin/rebuild-script.py
new file mode 100755
index 000000000..a816af3eb
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/bin/rebuild-script.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+"""
+Helper script to rebuild virtualenv.py from virtualenv_support
+"""
+from __future__ import print_function
+
+import os
+import re
+import codecs
+from zlib import crc32
+
+here = os.path.dirname(__file__)
+script = os.path.join(here, '..', 'virtualenv.py')
+
+# Note: despite the variable name, "gzip" is the zlib codec; the embedded
+# payloads are zlib-compressed and then base64-encoded.
+gzip = codecs.lookup('zlib')
+b64 = codecs.lookup('base64')
+
+file_regex = re.compile(
+ br'##file (.*?)\n([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*convert\("""\n(.*?)"""\)',
+ re.S)
+file_template = b'##file %(filename)s\n%(varname)s = convert("""\n%(data)s""")'
+
+def rebuild(script_path):
+ with open(script_path, 'rb') as f:
+ script_content = f.read()
+ parts = []
+ last_pos = 0
+ match = None
+ for match in file_regex.finditer(script_content):
+ parts += [script_content[last_pos:match.start()]]
+ last_pos = match.end()
+ filename, fn_decoded = match.group(1), match.group(1).decode()
+ varname = match.group(2)
+ data = match.group(3)
+
+ print('Found file %s' % fn_decoded)
+ pathname = os.path.join(here, '..', 'virtualenv_embedded', fn_decoded)
+
+ with open(pathname, 'rb') as f:
+ embedded = f.read()
+ new_crc = crc32(embedded)
+ new_data = b64.encode(gzip.encode(embedded)[0])[0]
+
+ if new_data == data:
+ print(' File up to date (crc: %s)' % new_crc)
+ parts += [match.group(0)]
+ continue
+ # Else: content has changed
+ crc = crc32(gzip.decode(b64.decode(data)[0])[0])
+ print(' Content changed (crc: %s -> %s)' %
+ (crc, new_crc))
+ new_match = file_template % {
+ b'filename': filename,
+ b'varname': varname,
+ b'data': new_data
+ }
+ parts += [new_match]
+
+ parts += [script_content[last_pos:]]
+ new_content = b''.join(parts)
+
+ if new_content != script_content:
+ print('Content updated; overwriting... ', end='')
+ with open(script_path, 'wb') as f:
+ f.write(new_content)
+ print('done.')
+ else:
+ print('No changes in content')
+ if match is None:
+ print('No variables were matched/found')
+
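+# Usage sketch: python bin/rebuild-script.py
+# Paths are resolved relative to this file, so it can be run from any working
+# directory inside a virtualenv source checkout.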
+if __name__ == '__main__':
+ rebuild(script)
diff --git a/testing/mozharness/external_tools/virtualenv/docs/Makefile b/testing/mozharness/external_tools/virtualenv/docs/Makefile
new file mode 100644
index 000000000..e4de9f847
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-compressor.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-compressor.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/django-compressor"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-compressor"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/mozharness/external_tools/virtualenv/docs/changes.rst b/testing/mozharness/external_tools/virtualenv/docs/changes.rst
new file mode 100644
index 000000000..2df19f666
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/changes.rst
@@ -0,0 +1,985 @@
+Release History
+===============
+
+15.0.1 (2016-03-17)
+-------------------
+
+* Print error message when DEST_DIR exists and is a file
+
+* Upgrade setuptools to 20.3
+
+* Upgrade pip to 8.1.1.
+
+
+15.0.0 (2016-03-05)
+-------------------
+
+* Remove the `virtualenv-N.N` script from the package; this can no longer be
+ correctly created from a wheel installation.
+ Resolves :issue:`851`, :issue:`692`
+
+* Remove accidental runtime dependency on pip by extracting certificate in the
+ subprocess.
+
+* Upgrade setuptools to 20.2.2.
+
+* Upgrade pip to 8.1.0.
+
+
+14.0.6 (2016-02-07)
+-------------------
+
+* Upgrade setuptools to 20.0
+
+* Upgrade wheel to 0.29.0
+
+* Fix an error where virtualenv didn't pass in a working ssl certificate for
+ pip, causing "weird" errors related to ssl.
+
+
+14.0.5 (2016-02-01)
+-------------------
+
+* Homogenize drive letter casing for both prefixes and filenames. :issue:`858`
+
+
+14.0.4 (2016-01-31)
+-------------------
+
+* Upgrade setuptools to 19.6.2
+
+* Revert ac4ea65; only correct drive letter case.
+ Fixes :issue:`856`, :issue:`815`
+
+
+14.0.3 (2016-01-28)
+-------------------
+
+* Upgrade setuptools to 19.6.1
+
+
+14.0.2 (2016-01-28)
+-------------------
+
+* Upgrade setuptools to 19.6
+
+* Suppress any errors from ``unset`` on different shells (:pull:`843`)
+
+* Normalize letter case for prefix path checking. Fixes :issue:`837`
+
+
+14.0.1 (2016-01-21)
+-------------------
+
+* Upgrade from pip 8.0.0 to 8.0.2.
+
+* Fix the default of ``--(no-)download`` to default to downloading.
+
+
+14.0.0 (2016-01-19)
+-------------------
+
+* **BACKWARDS INCOMPATIBLE** Drop support for Python 3.2.
+
+* Upgrade setuptools to 19.4
+
+* Upgrade wheel to 0.26.0
+
+* Upgrade pip to 8.0.0
+
+* Upgrade argparse to 1.4.0
+
+* Added support for ``python-config`` script (:pull:`798`)
+
+* Updated activate.fish (:pull:`589`) (:pull:`799`)
+
+* Account for a ``site.pyo`` correctly in some python implementations (:pull:`759`)
+
+* Properly restore an empty PS1 (:issue:`407`)
+
+* Properly remove ``pydoc`` when deactivating
+
+* Remove workaround for very old Mageia / Mandriva linuxes (:pull:`472`)
+
+* Added a space after virtualenv name in the prompt: ``(env) $PS1``
+
+* Make sure not to run a --user install when creating the virtualenv (:pull:`803`)
+
+* Remove virtualenv.py's path from sys.path when executing with a new
+ python. Fixes issue :issue:`779`, :issue:`763` (:pull:`805`)
+
+* Remove use of () in .bat files so ``Program Files (x86)`` works :issue:`35`
+
+* Download new releases of the preinstalled software from PyPI when there are
+ new releases available. This behavior can be disabled using
+ ``--no-download``.
+
+* Make ``--no-setuptools``, ``--no-pip``, and ``--no-wheel`` independent of
+ each other.
+
+
+13.1.2 (2015-08-23)
+-------------------
+
+* Upgrade pip to 7.1.2.
+
+
+13.1.1 (2015-08-20)
+-------------------
+
+* Upgrade pip to 7.1.1.
+
+* Upgrade setuptools to 18.2.
+
+* Make the activate script safe to use when bash is running with ``-u``.
+
+
+13.1.0 (2015-06-30)
+-------------------
+
+* Upgrade pip to 7.1.0
+
+* Upgrade setuptools to 18.0.1
+
+
+13.0.3 (2015-06-01)
+-------------------
+
+* Upgrade pip to 7.0.3
+
+
+13.0.2 (2015-06-01)
+-------------------
+
+* Upgrade pip to 7.0.2
+
+* Upgrade setuptools to 17.0
+
+
+13.0.1 (2015-05-22)
+-------------------
+
+* Upgrade pip to 7.0.1
+
+
+13.0.0 (2015-05-21)
+-------------------
+
+* Automatically install wheel when creating a new virtualenv. This can be
+ disabled by using the ``--no-wheel`` option.
+
+* Don't trust the current directory as a location to discover files to install
+ packages from.
+
+* Upgrade setuptools to 16.0.
+
+* Upgrade pip to 7.0.0.
+
+
+12.1.1 (2015-04-07)
+-------------------
+
+* Upgrade pip to 6.1.1
+
+
+12.1.0 (2015-04-07)
+-------------------
+
+* Upgrade setuptools to 15.0
+
+* Upgrade pip to 6.1.0
+
+
+12.0.7 (2015-02-04)
+-------------------
+
+* Upgrade pip to 6.0.8
+
+
+12.0.6 (2015-01-28)
+-------------------
+
+* Upgrade pip to 6.0.7
+
+* Upgrade setuptools to 12.0.5
+
+
+12.0.5 (2015-01-03)
+-------------------
+
+* Upgrade pip to 6.0.6
+
+* Upgrade setuptools to 11.0
+
+
+12.0.4 (2014-12-23)
+-------------------
+
+* Revert the fix to ``-p`` on Debian based pythons as it was broken in other
+ situations.
+
+* Revert several sys.path changes new in 12.0 which were breaking virtualenv.
+
+12.0.3 (2014-12-23)
+-------------------
+
+* Fix an issue where Debian based Pythons would fail when using -p with the
+ host Python.
+
+* Upgrade pip to 6.0.3
+
+12.0.2 (2014-12-23)
+-------------------
+
+* Upgraded pip to 6.0.2
+
+12.0.1 (2014-12-22)
+-------------------
+
+* Upgraded pip to 6.0.1
+
+
+12.0 (2014-12-22)
+-----------------
+
+* **PROCESS** Version numbers are now simply ``X.Y`` where the leading ``1``
+ has been dropped.
+* Split up documentation into structured pages
+* Now using pytest framework
+* Correct sys.path ordering for debian, issue #461
+* Correctly throws error on older Pythons, issue #619
+* Allow for empty $PATH, pull #601
+* Don't set prompt if $env:VIRTUAL_ENV_DISABLE_PROMPT is set for Powershell
+* Updated setuptools to 7.0
+
+1.11.6 (2014-05-16)
+-------------------
+
+* Updated setuptools to 3.6
+* Updated pip to 1.5.6
+
+1.11.5 (2014-05-03)
+-------------------
+
+* Updated setuptools to 3.4.4
+* Updated documentation to use https://virtualenv.pypa.io/
+* Updated pip to 1.5.5
+
+1.11.4 (2014-02-21)
+-------------------
+
+* Updated pip to 1.5.4
+
+
+1.11.3 (2014-02-20)
+-------------------
+
+* Updated setuptools to 2.2
+* Updated pip to 1.5.3
+
+
+1.11.2 (2014-01-26)
+-------------------
+
+* Fixed easy_install-installed virtualenvs by updating pip to 1.5.2
+
+1.11.1 (2014-01-20)
+-------------------
+
+* Fixed an issue where pip and setuptools were not getting installed when using
+ the ``--system-site-packages`` flag.
+* Updated setuptools to fix an issue when installed with easy_install
+* Fixed an issue with Python 3.4 and sys.stdout encoding being set to ascii
+* Upgraded pip to v1.5.1
+* Upgraded setuptools to v2.1
+
+1.11 (2014-01-02)
+-----------------
+
+* **BACKWARDS INCOMPATIBLE** Switched to using wheels for the bundled copies of
+ setuptools and pip. Using sdists is no longer supported - users supplying
+ their own versions of pip/setuptools will need to provide wheels.
+* **BACKWARDS INCOMPATIBLE** Modified the handling of ``--extra-search-dirs``.
+ This option now works like pip's ``--find-links`` option, in that it adds
+ extra directories to search for compatible wheels for pip and setuptools.
+ The actual wheel selected is chosen based on version and compatibility, using
+ the same algorithm as ``pip install setuptools``.
+* Fixed #495, --always-copy was failing (PR #511)
+* Upgraded pip to v1.5
+* Upgraded setuptools to v1.4
+
+1.10.1 (2013-08-07)
+-------------------
+
+* **New Signing Key** Release 1.10.1 is using a different key than normal with
+ fingerprint: 7C6B 7C5D 5E2B 6356 A926 F04F 6E3C BCE9 3372 DCFA
+* Upgraded pip to v1.4.1
+* Upgraded setuptools to v0.9.8
+
+
+1.10 (2013-07-23)
+-----------------
+
+* **BACKWARDS INCOMPATIBLE** Dropped support for Python 2.5. The minimum
+ supported Python version is now Python 2.6.
+
+* **BACKWARDS INCOMPATIBLE** Using ``virtualenv.py`` as an isolated script
+ (i.e. without an associated ``virtualenv_support`` directory) is no longer
+ supported for security reasons and will fail with an error.
+
+ Along with this, ``--never-download`` is now always pinned to ``True``, and
+ is only being maintained in the short term for backward compatibility
+ (Pull #412).
+
+* **IMPORTANT** Switched to the new setuptools (v0.9.7) which has been merged
+ with Distribute_ again and works for Python 2 and 3 with one codebase.
+ The ``--distribute`` and ``--setuptools`` options are now no-op.
+
+* Updated to pip 1.4.
+
+* Added support for PyPy3k
+
+* Added the option to use a version number with the ``-p`` option to get the
+ system copy of that Python version (Windows only)
+
+* Removed embedded ``ez_setup.py``, ``distribute_setup.py`` and
+ ``distribute_from_egg.py`` files as part of switching to merged setuptools.
+
+* Fixed ``--relocatable`` to work better on Windows.
+
+* Fixed issue with readline on Windows.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+
+1.9.1 (2013-03-08)
+------------------
+
+* Updated to pip 1.3.1 that fixed a major backward incompatible change of
+  parsing URLs to externally hosted packages that got accidentally included
+ in pip 1.3.
+
+1.9 (2013-03-07)
+----------------
+
+* Unset VIRTUAL_ENV environment variable in deactivate.bat (Pull #364)
+* Upgraded distribute to 0.6.34.
+* Added ``--no-setuptools`` and ``--no-pip`` options (Pull #336).
+* Fixed Issue #373. virtualenv-1.8.4 was failing in cygwin (Pull #382).
+* Fixed Issue #378. virtualenv is now "multiarch" aware on debian/ubuntu (Pull #379).
+* Fixed issue with readline module path on pypy and OSX (Pull #374).
+* Made 64bit detection compatible with Python 2.5 (Pull #393).
+
+
+1.8.4 (2012-11-25)
+------------------
+
+* Updated distribute to 0.6.31. This fixes #359 (numpy install regression) on
+ UTF-8 platforms, and provides a workaround on other platforms:
+ ``PYTHONIOENCODING=utf8 pip install numpy``.
+
+* When installing virtualenv via curl, don't forget to filter out arguments
+ the distribute setup script won't understand. Fixes #358.
+
+* Added some more integration tests.
+
+* Removed the unsupported embedded setuptools egg for Python 2.4 to reduce
+ file size.
+
+1.8.3 (2012-11-21)
+------------------
+
+* Fixed readline on OS X. Thanks minrk
+
+* Updated distribute to 0.6.30 (improves our error reporting, plus new
+ distribute features and fixes). Thanks Gabriel (g2p)
+
+* Added compatibility with multiarch Python (Python 3.3 for example). Added an
+ integration test. Thanks Gabriel (g2p)
+
+* Added ability to install distribute from a user-provided egg, rather than the
+ bundled sdist, for better speed. Thanks Paul Moore.
+
+* Make the creation of lib64 symlink smarter about already-existing symlink,
+ and more explicit about full paths. Fixes #334 and #330. Thanks Jeremy Orem.
+
+* Give lib64 site-dir preference over lib on 64-bit systems, to avoid wrong
+ 32-bit compiles in the venv. Fixes #328. Thanks Damien Nozay.
+
+* Fix a bug with prompt-handling in ``activate.csh`` in non-interactive csh
+ shells. Fixes #332. Thanks Benjamin Root for report and patch.
+
+* Make it possible to create a virtualenv from within a Python 3.3
+  pyvenv. Thanks Chris McDonough for the report.
+
+* Add optional --setuptools option to be able to switch to it in case
+ distribute is the default (like in Debian).
+
+1.8.2 (2012-09-06)
+------------------
+
+* Updated the included pip version to 1.2.1 to fix regressions introduced
+ there in 1.2.
+
+
+1.8.1 (2012-09-03)
+------------------
+
+* Fixed distribute version used with `--never-download`. Thanks michr for
+ report and patch.
+
+* Fix creating Python 3.3 based virtualenvs by unsetting the
+ ``__PYVENV_LAUNCHER__`` environment variable in subprocesses.
+
+
+1.8 (2012-09-01)
+----------------
+
+* **Dropped support for Python 2.4** The minimum supported Python version is
+ now Python 2.5.
+
+* Fix `--relocatable` on systems that use lib64. Fixes #78. Thanks Branden
+ Rolston.
+
+* Symlink some additional modules under Python 3. Fixes #194. Thanks Vinay
+ Sajip, Ian Clelland, and Stefan Holek for the report.
+
+* Fix ``--relocatable`` when a script uses ``__future__`` imports. Thanks
+ Branden Rolston.
+
+* Fix a bug in the config option parser that prevented setting negative
+ options with environment variables. Thanks Ralf Schmitt.
+
+* Allow setting ``--no-site-packages`` from the config file.
+
+* Use ``/usr/bin/multiarch-platform`` if available to figure out the include
+ directory. Thanks for the patch, Mika Laitio.
+
+* Fix ``install_name_tool`` replacement to work on Python 3.X.
+
+* Handle paths of users' site-packages on Mac OS X correctly when changing
+ the prefix.
+
+* Updated the embedded version of distribute to 0.6.28 and pip to 1.2.
+
+
+1.7.2 (2012-06-22)
+------------------
+
+* Updated to distribute 0.6.27.
+
+* Fix activate.fish on OS X. Fixes #8. Thanks David Schoonover.
+
+* Create a virtualenv-x.x script with the Python version when installing, so
+ virtualenv for multiple Python versions can be installed to the same
+ script location. Thanks Miki Tebeka.
+
+* Restored ability to create a virtualenv with a path longer than 78
+ characters, without breaking creation of virtualenvs with non-ASCII paths.
+ Thanks, Bradley Ayers.
+
+* Added ability to create virtualenvs without having installed Apple's
+  developer tools (using our own implementation of ``install_name_tool``).
+ Thanks Mike Hommey.
+
+* Fixed PyPy and Jython support on Windows. Thanks Konstantin Zemlyak.
+
+* Added pydoc script to ease use. Thanks Marc Abramowitz. Fixes #149.
+
+* Fixed creating a bootstrap script on Python 3. Thanks Raul Leal. Fixes #280.
+
+* Fixed inconsistency when having set the ``PYTHONDONTWRITEBYTECODE`` env var
+ with the --distribute option or the ``VIRTUALENV_USE_DISTRIBUTE`` env var.
+ ``VIRTUALENV_USE_DISTRIBUTE`` is now considered again as a legacy alias.
+
+
+1.7.1.2 (2012-02-17)
+--------------------
+
+* Fixed minor issue in `--relocatable`. Thanks, Cap Petschulat.
+
+
+1.7.1.1 (2012-02-16)
+--------------------
+
+* Bumped the version string in ``virtualenv.py`` up, too.
+
+* Fixed an rST rendering bug in the long description.
+
+
+1.7.1 (2012-02-16)
+------------------
+
+* Update embedded pip to version 1.1.
+
+* Fix `--relocatable` under Python 3. Thanks Doug Hellmann.
+
+* Added environ PATH modification to activate_this.py. Thanks Doug
+ Napoleone. Fixes #14.
+
+* Support creating virtualenvs directly from a Python build directory on
+ Windows. Thanks CBWhiz. Fixes #139.
+
+* Use non-recursive symlinks to fix things up for posix_local install
+ scheme. Thanks michr.
+
+* Made activate script available for use with msys and cygwin on Windows.
+ Thanks Greg Haskins, Cliff Xuan, Jonathan Griffin and Doug Napoleone.
+ Fixes #176.
+
+* Fixed creation of virtualenvs on Windows when Python is not installed for
+ all users. Thanks Anatoly Techtonik for report and patch and Doug
+ Napoleone for testing and confirmation. Fixes #87.
+
+* Fixed creation of virtualenvs using -p in installs where some modules
+ that ought to be in the standard library (e.g. `readline`) are actually
+ installed in `site-packages` next to `virtualenv.py`. Thanks Greg Haskins
+ for report and fix. Fixes #167.
+
+* Added activation script for Powershell (signed by Jannis Leidel). Many
+ thanks to Jason R. Coombs.
+
+
+1.7 (2011-11-30)
+----------------
+
+* Gave user-provided ``--extra-search-dir`` priority over default dirs for
+ finding setuptools/distribute (it already had priority for finding pip).
+ Thanks Ethan Jucovy.
+
+* Updated embedded Distribute release to 0.6.24. Thanks Alex Gronholm.
+
+* Made ``--no-site-packages`` behavior the default behavior. The
+ ``--no-site-packages`` flag is still permitted, but displays a warning when
+ used. Thanks Chris McDonough.
+
+* New flag: ``--system-site-packages``; this flag should be passed to get the
+ previous default global-site-package-including behavior back.
+
+* Added ability to set command options as environment variables and options
+ in a ``virtualenv.ini`` file.
+
+* Fixed various encoding related issues with paths. Thanks Gunnlaugur Thor Briem.
+
+* Made ``virtualenv.py`` script executable.
+
+
+1.6.4 (2011-07-21)
+------------------
+
+* Restored ability to run on Python 2.4, too.
+
+
+1.6.3 (2011-07-16)
+------------------
+
+* Restored ability to run on Python < 2.7.
+
+
+1.6.2 (2011-07-16)
+------------------
+
+* Updated embedded distribute release to 0.6.19.
+
+* Updated embedded pip release to 1.0.2.
+
+* Fixed #141 - Be smarter about finding pkg_resources when using the
+ non-default Python interpreter (by using the ``-p`` option).
+
+* Fixed #112 - Fixed path in docs.
+
+* Fixed #109 - Corrected doctests of a Logger method.
+
+* Fixed #118 - Fixed creating virtualenvs on platforms that use the
+ "posix_local" install scheme, such as Ubuntu with Python 2.7.
+
+* Add missing library to Python 3 virtualenvs (``_dummy_thread``).
+
+
+1.6.1 (2011-04-30)
+------------------
+
+* Start to use git-flow.
+
+* Added support for PyPy 1.5
+
+* Fixed #121 -- added sanity-checking of the -p argument. Thanks Paul Nasrat.
+
+* Added progress meter for pip installation as well as setuptools. Thanks Ethan
+ Jucovy.
+
+* Added --never-download and --search-dir options. Thanks Ethan Jucovy.
+
+
+1.6
+---
+
+* Added Python 3 support! Huge thanks to Vinay Sajip and Vitaly Babiy.
+
+* Fixed creation of virtualenvs on Mac OS X when standard library modules
+ (readline) are installed outside the standard library.
+
+* Updated bundled pip to 1.0.
+
+
+1.5.2
+-----
+
+* Moved the main repository to GitHub: https://github.com/pypa/virtualenv
+
+* Handed the project over to a new team of maintainers; primary maintenance
+  transferred from Ian to Jannis Leidel, Carl Meyer and Brian Rosner.
+
+* Fixed a few more pypy related bugs.
+
+* Updated bundled pip to 0.8.2.
+
+
+1.5.1
+-----
+
+* Added ``_weakrefset`` requirement for Python 2.7.1.
+
+* Fixed Windows regression in 1.5
+
+
+1.5
+---
+
+* Include pip 0.8.1.
+
+* Add support for PyPy.
+
+* Uses a proper temporary dir when installing environment requirements.
+
+* Add ``--prompt`` option to be able to override the default prompt prefix.
+
+* Fix an issue with ``--relocatable`` on Windows.
+
+* Fix issue with installing the wrong version of distribute.
+
+* Add fish and csh activate scripts.
+
+
+1.4.9
+-----
+
+* Include pip 0.7.2
+
+
+1.4.8
+-----
+
+* Fix for Mac OS X Framework builds that use
+ ``--universal-archs=intel``
+
+* Fix ``activate_this.py`` on Windows.
+
+* Allow ``$PYTHONHOME`` to be set, so long as you use ``source
+ bin/activate`` it will get unset; if you leave it set and do not
+ activate the environment it will still break the environment.
+
+* Include pip 0.7.1
+
+
+1.4.7
+-----
+
+* Include pip 0.7
+
+
+1.4.6
+-----
+
+* Allow ``activate.sh`` to skip updating the prompt (by setting
+ ``$VIRTUAL_ENV_DISABLE_PROMPT``).
+
+
+1.4.5
+-----
+
+* Include pip 0.6.3
+
+* Fix ``activate.bat`` and ``deactivate.bat`` under Windows when
+ ``PATH`` contained a parenthesis
+
+
+1.4.4
+-----
+
+* Include pip 0.6.2 and Distribute 0.6.10
+
+* Create the ``virtualenv`` script even when Setuptools isn't
+ installed
+
+* Fix problem with ``virtualenv --relocatable`` when ``bin/`` has
+  subdirectories (e.g., ``bin/.svn/``); from Alan Franzoni.
+
+* If you set ``$VIRTUALENV_DISTRIBUTE`` then virtualenv will use
+ Distribute by default (so you don't have to remember to use
+ ``--distribute``).
+
+
+1.4.3
+-----
+
+* Include pip 0.6.1
+
+
+1.4.2
+-----
+
+* Fix pip installation on Windows
+
+* Fix use of stand-alone ``virtualenv.py`` (and boot scripts)
+
+* Exclude ~/.local (user site-packages) from environments when using
+ ``--no-site-packages``
+
+
+1.4.1
+-----
+
+* Include pip 0.6
+
+
+1.4
+---
+
+* Updated setuptools to 0.6c11
+
+* Added the --distribute option
+
+* Fixed packaging problem of support-files
+
+
+1.3.4
+-----
+
+* Virtualenv now copies the actual embedded Python binary on
+ Mac OS X to fix a hang on Snow Leopard (10.6).
+
+* Fail more gracefully on Windows when ``win32api`` is not installed.
+
+* Fix site-packages taking precedence over Jython's ``__classpath__``
+ and also specially handle the new ``__pyclasspath__`` entry in
+ ``sys.path``.
+
+* Now copies Jython's ``registry`` file to the virtualenv if it exists.
+
+* Better find libraries when compiling extensions on Windows.
+
+* Create ``Scripts\pythonw.exe`` on Windows.
+
+* Added support for the Debian/Ubuntu
+ ``/usr/lib/pythonX.Y/dist-packages`` directory.
+
+* Set ``distutils.sysconfig.get_config_vars()['LIBDIR']`` (based on
+ ``sys.real_prefix``) which is reported to help building on Windows.
+
+* Make ``deactivate`` work on ksh
+
+* Fixes for ``--python``: make it work with ``--relocatable`` and the
+ symlink created to the exact Python version.
+
+
+1.3.3
+-----
+
+* Use Windows newlines in ``activate.bat``, which has been reported to help
+ when using non-ASCII directory names.
+
+* Fixed compatibility with Jython 2.5b1.
+
+* Added a function ``virtualenv.install_python`` for more fine-grained
+ access to what ``virtualenv.create_environment`` does.
+
+* Fix `a problem <https://bugs.launchpad.net/virtualenv/+bug/241581>`_
+ with Windows and paths that contain spaces.
+
+* If ``/path/to/env/.pydistutils.cfg`` exists (or
+ ``/path/to/env/pydistutils.cfg`` on Windows systems) then ignore
+ ``~/.pydistutils.cfg`` and use that other file instead.
+
+* Fix `a problem
+  <https://bugs.launchpad.net/virtualenv/+bug/340050>`_ picking up
+  some ``.so`` libraries in ``/usr/local``.
+
+
+1.3.2
+-----
+
+* Remove the ``[install] prefix = ...`` setting from the virtualenv
+ ``distutils.cfg`` -- this has been causing problems for a lot of
+ people, in rather obscure ways.
+
+* If you use a boot script it will attempt to import ``virtualenv``
+ and find a pre-downloaded Setuptools egg using that.
+
+* Added platform-specific paths, like ``/usr/lib/pythonX.Y/plat-linux2``
+
+
+1.3.1
+-----
+
+* Real Python 2.6 compatibility. Backported the Python 2.6 updates to
+ ``site.py``, including `user directories
+ <http://docs.python.org/dev/whatsnew/2.6.html#pep-370-per-user-site-packages-directory>`_
+ (this means older versions of Python will support user directories,
+ whether intended or not).
+
+* Always set ``[install] prefix`` in ``distutils.cfg`` -- previously
+ on some platforms where a system-wide ``distutils.cfg`` was present
+ with a ``prefix`` setting, packages would be installed globally
+ (usually in ``/usr/local/lib/pythonX.Y/site-packages``).
+
+* Sometimes Cygwin seems to leave ``.exe`` off ``sys.executable``; a
+ workaround is added.
+
+* Fix ``--python`` option.
+
+* Fixed handling of Jython environments that use a
+ jython-complete.jar.
+
+
+1.3
+---
+
+* Update to Setuptools 0.6c9
+* Added an option ``virtualenv --relocatable EXISTING_ENV``, which
+ will make an existing environment "relocatable" -- the paths will
+ not be absolute in scripts, ``.egg-info`` and ``.pth`` files. This
+ may assist in building environments that can be moved and copied.
+  You have to run this *after* any new packages are installed.
+* Added ``bin/activate_this.py``, a file you can use like
+ ``execfile("path_to/activate_this.py",
+ dict(__file__="path_to/activate_this.py"))`` -- this will activate
+ the environment in place, similar to what `the mod_wsgi example
+ does <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+* For Mac framework builds of Python, the site-packages directory
+ ``/Library/Python/X.Y/site-packages`` is added to ``sys.path``, from
+ Andrea Rech.
+* Some platform-specific modules in Macs are added to the path now
+ (``plat-darwin/``, ``plat-mac/``, ``plat-mac/lib-scriptpackages``),
+ from Andrea Rech.
+* Fixed a small Bashism in the ``bin/activate`` shell script.
+* Added ``__future__`` to the list of required modules, for Python
+ 2.3. You'll still need to backport your own ``subprocess`` module.
+* Fixed the ``__classpath__`` entry in Jython's ``sys.path`` taking
+  precedence over virtualenv's libs.
+
+
+1.2
+---
+
+* Added a ``--python`` option to select the Python interpreter.
+* Add ``warnings`` to the modules copied over, for Python 2.6 support.
+* Add ``sets`` to the module copied over for Python 2.3 (though Python
+ 2.3 still probably doesn't work).
+
+
+1.1.1
+-----
+
+* Added support for Jython 2.5.
+
+
+1.1
+---
+
+* Added support for Python 2.6.
+* Fix a problem with missing ``DLLs/zlib.pyd`` on Windows.
+* Create ``bin/python`` (or ``bin/python.exe``) even when you run virtualenv
+  with an interpreter named, e.g., ``python2.4``
+* Fix MacPorts Python
+* Added --unzip-setuptools option
+* Update to Setuptools 0.6c8
+* If the current directory is not writable, run ez_setup.py in ``/tmp``
+* Copy or symlink over the ``include`` directory so that packages will
+ more consistently compile.
+
+
+1.0
+---
+
+* Fix build on systems that use ``/usr/lib64``, distinct from
+ ``/usr/lib`` (specifically CentOS x64).
+* Fixed bug in ``--clear``.
+* Fixed typos in ``deactivate.bat``.
+* Preserve ``$PYTHONPATH`` when calling subprocesses.
+
+
+0.9.2
+-----
+
+* Fix include dir copying on Windows (makes compiling possible).
+* Include the main ``lib-tk`` in the path.
+* Patch ``distutils.sysconfig``: ``get_python_inc`` and
+ ``get_python_lib`` to point to the global locations.
+* Install ``distutils.cfg`` before Setuptools, so that system
+  customizations of ``distutils.cfg`` won't affect the installation.
+* Add ``bin/pythonX.Y`` to the virtualenv (in addition to
+ ``bin/python``).
+* Fixed an issue with Mac Framework Python builds, and absolute paths
+ (from Ronald Oussoren).
+
+
+0.9.1
+-----
+
+* Improve ability to create a virtualenv from inside a virtualenv.
+* Fix a little bug in ``bin/activate``.
+* Actually get ``distutils.cfg`` to work reliably.
+
+
+0.9
+---
+
+* Added ``lib-dynload`` and ``config`` to things that need to be
+ copied over in an environment.
+* Copy over or symlink the ``include`` directory, so that you can
+ build packages that need the C headers.
+* Include a ``distutils`` package, so you can locally update
+ ``distutils.cfg`` (in ``lib/pythonX.Y/distutils/distutils.cfg``).
+* Better avoid downloading Setuptools, and hitting PyPI on environment
+ creation.
+* Fix a problem creating a ``lib64/`` directory.
+* Should work on MacOSX Framework builds (the default Python
+ installations on Mac). Thanks to Ronald Oussoren.
+
+
+0.8.4
+-----
+
+* Windows installs would sometimes give errors about ``sys.prefix`` that
+ were inaccurate.
+* Slightly prettier output.
+
+
+0.8.3
+-----
+
+* Added support for Windows.
+
+
+0.8.2
+-----
+
+* Give a better warning if you are on an unsupported platform (Mac
+ Framework Pythons, and Windows).
+* Give error about running while inside a workingenv.
+* Give better error message about Python 2.3.
+
+
+0.8.1
+-----
+
+Fixed packaging of the library.
+
+
+0.8
+---
+
+Initial release. Everything is changed and new!
diff --git a/testing/mozharness/external_tools/virtualenv/docs/conf.py b/testing/mozharness/external_tools/virtualenv/docs/conf.py
new file mode 100644
index 000000000..9332aa1bc
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/conf.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+#
+# Paste documentation build configuration file, created by
+# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import os
+import sys
+
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+# If your extensions are in another directory, add it here.
+sys.path.insert(0, os.path.abspath(os.pardir))
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks']
+
+# Add any paths that contain templates here, relative to this directory.
+#templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = 'virtualenv'
+copyright = '2007-2014, Ian Bicking, The Open Planning Project, PyPA'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+try:
+ from virtualenv import __version__
+ # The short X.Y version.
+ version = '.'.join(__version__.split('.')[:2])
+ # The full version, including alpha/beta/rc tags.
+ release = __version__
+except ImportError:
+ version = release = 'dev'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+unused_docs = []
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+extlinks = {
+ 'issue': ('https://github.com/pypa/virtualenv/issues/%s', '#'),
+ 'pull': ('https://github.com/pypa/virtualenv/pull/%s', 'PR #'),
+}
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+#html_style = 'default.css'
+
+html_theme = 'default'
+if not on_rtd:
+ try:
+ import sphinx_rtd_theme
+ html_theme = 'sphinx_rtd_theme'
+ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+ except ImportError:
+ pass
+
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Content template for the index page.
+#html_index = ''
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Pastedoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+#latex_documents = []
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/testing/mozharness/external_tools/virtualenv/docs/development.rst b/testing/mozharness/external_tools/virtualenv/docs/development.rst
new file mode 100644
index 000000000..aba2785a3
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/development.rst
@@ -0,0 +1,61 @@
+Development
+===========
+
+Contributing
+------------
+
+Refer to the `pip development`_ documentation - it applies equally to
+virtualenv, except that virtualenv issues should be filed on the `virtualenv
+repo`_ at GitHub.
+
+Virtualenv's release schedule is tied to pip's -- each time there's a new pip
+release, there will be a new virtualenv release that bundles the new version of
+pip.
+
+Files in the `virtualenv_embedded/` subdirectory are embedded into
+`virtualenv.py` itself as base64-encoded strings (in order to support
+single-file use of `virtualenv.py` without installing it). If your patch
+changes any file in `virtualenv_embedded/`, run `bin/rebuild-script.py` to
+update the embedded version of that file in `virtualenv.py`; commit that and
+submit it as part of your patch / pull request.
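+
+A sketch of that workflow, assuming your patch touched
+``virtualenv_embedded/activate.sh``::
+
+    $ bin/rebuild-script.py
+    $ git commit virtualenv.py virtualenv_embedded/activate.sh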
+
+.. _pip development: http://www.pip-installer.org/en/latest/development.html
+.. _virtualenv repo: https://github.com/pypa/virtualenv/
+
+Running the tests
+-----------------
+
+Virtualenv's test suite is small and not yet comprehensive, but we aim
+to grow it.
+
+The easy way to run tests (handles test dependencies automatically)::
+
+ $ python setup.py test
+
+If you want to run only a selection of the tests, you'll need to run them
+directly with pytest instead. Create a virtualenv, and install required
+packages::
+
+ $ pip install pytest mock
+
+Run pytest::
+
+ $ pytest
+
+Or select just a single test file to run::
+
+    $ pytest tests/test_virtualenv.py
+
+Status and License
+------------------
+
+``virtualenv`` is a successor to `workingenv
+<http://cheeseshop.python.org/pypi/workingenv.py>`_, and an extension
+of `virtual-python
+<http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_.
+
+It was written by Ian Bicking, sponsored by the `Open Planning
+Project <http://openplans.org>`_ and is now maintained by a
+`group of developers <https://github.com/pypa/virtualenv/raw/master/AUTHORS.txt>`_.
+It is licensed under an
+`MIT-style permissive license <https://github.com/pypa/virtualenv/raw/master/LICENSE.txt>`_.
diff --git a/testing/mozharness/external_tools/virtualenv/docs/index.rst b/testing/mozharness/external_tools/virtualenv/docs/index.rst
new file mode 100644
index 000000000..e745a87b7
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/index.rst
@@ -0,0 +1,137 @@
+Virtualenv
+==========
+
+`Mailing list <http://groups.google.com/group/python-virtualenv>`_ |
+`Issues <https://github.com/pypa/virtualenv/issues>`_ |
+`Github <https://github.com/pypa/virtualenv>`_ |
+`PyPI <https://pypi.python.org/pypi/virtualenv/>`_ |
+User IRC: #pypa
+Dev IRC: #pypa-dev
+
+Introduction
+------------
+
+``virtualenv`` is a tool to create isolated Python environments.
+
+The basic problem being addressed is one of dependencies and versions,
+and indirectly permissions. Imagine you have an application that
+needs version 1 of LibFoo, but another application requires version
+2. How can you use both these applications? If you install
+everything into ``/usr/lib/python2.7/site-packages`` (or whatever your
+platform's standard location is), it's easy to end up in a situation
+where you unintentionally upgrade an application that shouldn't be
+upgraded.
+
+Or more generally, what if you want to install an application *and
+leave it be*? If an application works, any change in its libraries or
+the versions of those libraries can break the application.
+
+Also, what if you can't install packages into the global
+``site-packages`` directory? For instance, on a shared host.
+
+In all these cases, ``virtualenv`` can help you. It creates an
+environment that has its own installation directories, that doesn't
+share libraries with other virtualenv environments (and optionally
+doesn't access the globally installed libraries either).
+
+.. comment: split here
+
+.. toctree::
+ :maxdepth: 2
+
+ installation
+ userguide
+ reference
+ development
+ changes
+
+.. warning::
+
+ Python bugfix releases 2.6.8, 2.7.3, 3.1.5 and 3.2.3 include a change that
+ will cause "import random" to fail with "cannot import name urandom" on any
+ virtualenv created on a Unix host with an earlier release of Python
+ 2.6/2.7/3.1/3.2, if the underlying system Python is upgraded. This is due to
+ the fact that a virtualenv uses the system Python's standard library but
+ contains its own copy of the Python interpreter, so an upgrade to the system
+ Python results in a mismatch between the version of the Python interpreter
+ and the version of the standard library. It can be fixed by removing
+ ``$ENV/bin/python`` and re-running virtualenv on the same target directory
+ with the upgraded Python.
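+
+   For example (a sketch, assuming ``$ENV`` is the path of the affected
+   environment)::
+
+      $ rm $ENV/bin/python
+      $ virtualenv $ENV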
+
+Other Documentation and Links
+-----------------------------
+
+* `Blog announcement of virtualenv`__.
+
+ .. __: http://blog.ianbicking.org/2007/10/10/workingenv-is-dead-long-live-virtualenv/
+
+* James Gardner has written a tutorial on using `virtualenv with
+ Pylons
+ <http://wiki.pylonshq.com/display/pylonscookbook/Using+a+Virtualenv+Sandbox>`_.
+
+* Chris Perkins created a `showmedo video including virtualenv
+ <http://showmedo.com/videos/video?name=2910000&fromSeriesID=291>`_.
+
+* Doug Hellmann's `virtualenvwrapper`_ is a useful set of scripts to make
+ your workflow with many virtualenvs even easier. `His initial blog post on it`__.
+ He also wrote `an example of using virtualenv to try IPython`__.
+
+ .. _virtualenvwrapper: https://pypi.python.org/pypi/virtualenvwrapper/
+ .. __: https://doughellmann.com/blog/2008/05/01/virtualenvwrapper/
+ .. __: https://doughellmann.com/blog/2008/02/01/ipython-and-virtualenv/
+
+* `Pew`_ is another wrapper for virtualenv that makes use of a different
+ activation technique.
+
+ .. _Pew: https://pypi.python.org/pypi/pew/
+
+* `Using virtualenv with mod_wsgi
+ <http://code.google.com/p/modwsgi/wiki/VirtualEnvironments>`_.
+
+* `virtualenv commands
+ <https://github.com/thisismedium/virtualenv-commands>`_ for some more
+ workflow-related tools around virtualenv.
+
+* PyCon US 2011 talk: `Reverse-engineering Ian Bicking's brain: inside pip and virtualenv
+ <http://pyvideo.org/video/568/reverse-engineering-ian-bicking--39-s-brain--insi>`_.
+ By the end of the talk, you'll have a good idea exactly how pip
+ and virtualenv do their magic, and where to go looking in the source
+ for particular behaviors or bug fixes.
+
+Compare & Contrast with Alternatives
+------------------------------------
+
+There are several alternatives that create isolated environments:
+
+* ``workingenv`` (which I do not suggest you use anymore) is the
+ predecessor to this library. It used the main Python interpreter,
+ but relied on setting ``$PYTHONPATH`` to activate the environment.
+ This causes problems when running Python scripts that aren't part of
+ the environment (e.g., a globally installed ``hg`` or ``bzr``). It
+ also conflicted a lot with Setuptools.
+
+* `virtual-python
+ <http://peak.telecommunity.com/DevCenter/EasyInstall#creating-a-virtual-python>`_
+ is also a predecessor to this library. It uses only symlinks, so it
+ couldn't work on Windows. It also symlinks over the *entire*
+ standard library and global ``site-packages``. As a result, it
+ won't see new additions to the global ``site-packages``.
+
+ This script only symlinks a small portion of the standard library
+ into the environment, and so on Windows it is feasible to simply
+ copy these files over. Also, it creates a new/empty
+ ``site-packages`` and also adds the global ``site-packages`` to the
+ path, so updates are tracked separately. This script also installs
+ Setuptools automatically, saving a step and avoiding the need for
+ network access.
+
+* `zc.buildout <http://pypi.python.org/pypi/zc.buildout>`_ doesn't
+ create an isolated Python environment in the same style, but
+ achieves similar results through a declarative config file that sets
+ up scripts with very particular packages. As a declarative system,
+ it is somewhat easier to repeat and manage, but more difficult to
+  experiment with. ``zc.buildout`` includes the ability to set up
+ non-Python systems (e.g., a database server or an Apache instance).
+
+I *strongly* recommend anyone doing application development or
+deployment use one of these tools.
diff --git a/testing/mozharness/external_tools/virtualenv/docs/installation.rst b/testing/mozharness/external_tools/virtualenv/docs/installation.rst
new file mode 100644
index 000000000..3006d7617
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/installation.rst
@@ -0,0 +1,58 @@
+Installation
+============
+
+.. warning::
+
+ We advise installing virtualenv-1.9 or greater. Prior to version 1.9, the
+ pip included in virtualenv did not download from PyPI over SSL.
+
+.. warning::
+
+ When using pip to install virtualenv, we advise using pip 1.3 or greater.
+ Prior to version 1.3, pip did not download from PyPI over SSL.
+
+.. warning::
+
+ We advise against using easy_install to install virtualenv when using
+ setuptools < 0.9.7, because easy_install didn't download from PyPI over SSL
+ and was broken in some subtle ways.
+
+To install globally with `pip` (if you have pip 1.3 or greater installed globally):
+
+::
+
+ $ [sudo] pip install virtualenv
+
+Or to get the latest unreleased dev version:
+
+::
+
+ $ [sudo] pip install https://github.com/pypa/virtualenv/tarball/develop
+
+
+To install version X.X globally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ [sudo] python setup.py install
+
+
+To *use* locally from source:
+
+::
+
+ $ curl -O https://pypi.python.org/packages/source/v/virtualenv/virtualenv-X.X.tar.gz
+ $ tar xvfz virtualenv-X.X.tar.gz
+ $ cd virtualenv-X.X
+ $ python virtualenv.py myVE
+
+.. note::
+
+ The ``virtualenv.py`` script is *not* supported if run without the
+ necessary pip/setuptools/virtualenv distributions available locally. All
+ of the installation methods above include a ``virtualenv_support``
+ directory alongside ``virtualenv.py`` which contains a complete set of
+ pip and setuptools distributions, and so are fully supported.
diff --git a/testing/mozharness/external_tools/virtualenv/docs/make.bat b/testing/mozharness/external_tools/virtualenv/docs/make.bat
new file mode 100644
index 000000000..aa5c189fc
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/make.bat
@@ -0,0 +1,170 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-compressor.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-compressor.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/testing/mozharness/external_tools/virtualenv/docs/reference.rst b/testing/mozharness/external_tools/virtualenv/docs/reference.rst
new file mode 100644
index 000000000..9249473c9
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/reference.rst
@@ -0,0 +1,261 @@
+Reference Guide
+===============
+
+``virtualenv`` Command
+----------------------
+
+.. _usage:
+
+Usage
+~~~~~
+
+:command:`virtualenv [OPTIONS] ENV_DIR`
+
+ Where ``ENV_DIR`` is an absolute or relative path to a directory to create
+ the virtual environment in.
+
+.. _options:
+
+Options
+~~~~~~~
+
+.. program:: virtualenv
+
+.. option:: --version
+
+ show program's version number and exit
+
+.. option:: -h, --help
+
+ show this help message and exit
+
+.. option:: -v, --verbose
+
+ Increase verbosity.
+
+.. option:: -q, --quiet
+
+ Decrease verbosity.
+
+.. option:: -p PYTHON_EXE, --python=PYTHON_EXE
+
+ The Python interpreter to use, e.g.,
+ --python=python2.5 will use the python2.5 interpreter
+ to create the new environment. The default is the
+ interpreter that virtualenv was installed with
+ (like ``/usr/bin/python``)
+
+.. option:: --clear
+
+ Clear out the non-root install and start from scratch.
+
+.. option:: --system-site-packages
+
+ Give the virtual environment access to the global
+ site-packages.
+
+.. option:: --always-copy
+
+ Always copy files rather than symlinking.
+
+.. option:: --relocatable
+
+ Make an EXISTING virtualenv environment relocatable.
+ This fixes up scripts and makes all .pth files relative.
+
+.. option:: --unzip-setuptools
+
+ Unzip Setuptools when installing it.
+
+.. option:: --no-setuptools
+
+ Do not install setuptools in the new virtualenv.
+
+.. option:: --no-pip
+
+ Do not install pip in the new virtualenv.
+
+.. option:: --no-wheel
+
+ Do not install wheel in the new virtualenv.
+
+.. option:: --extra-search-dir=DIR
+
+ Directory to look for setuptools/pip distributions in.
+ This option can be specified multiple times.
+
+.. option:: --prompt=PROMPT
+
+ Provides an alternative prompt prefix for this
+ environment.
+
+.. option:: --download
+
+ Download preinstalled packages from PyPI.
+
+.. option:: --no-download
+
+ Do not download preinstalled packages from PyPI.
+
+.. option:: --no-site-packages
+
+ DEPRECATED. Retained only for backward compatibility.
+ Not having access to global site-packages is now the
+ default behavior.
+
+.. option:: --distribute
+.. option:: --setuptools
+
+ Legacy; now have no effect. Before version 1.10 these could be used
+ to choose whether to install Distribute_ or Setuptools_ into the created
+ virtualenv. Distribute has now been merged into Setuptools, and the
+ latter is always installed.
+
+.. _Distribute: https://pypi.python.org/pypi/distribute
+.. _Setuptools: https://pypi.python.org/pypi/setuptools
+
+
+Configuration
+-------------
+
+Environment Variables
+~~~~~~~~~~~~~~~~~~~~~
+
+Each command line option is automatically used to look for environment
+variables with the name format ``VIRTUALENV_<UPPER_NAME>``. That means
+the names of the command line options are capitalized and have dashes
+(``'-'``) replaced with underscores (``'_'``).
+
+For example, to automatically use a custom Python binary instead of the
+one virtualenv is run with, you can also set an environment variable::
+
+ $ export VIRTUALENV_PYTHON=/opt/python-3.3/bin/python
+ $ virtualenv ENV
+
+It's the same as passing the option to virtualenv directly::
+
+ $ virtualenv --python=/opt/python-3.3/bin/python ENV
+
+This also works for appending command line options, like ``--extra-search-dir``.
+Just separate the passed values with a space, e.g.::
+
+ $ export VIRTUALENV_EXTRA_SEARCH_DIR="/path/to/dists /path/to/other/dists"
+ $ virtualenv ENV
+
+is the same as calling::
+
+ $ virtualenv --extra-search-dir=/path/to/dists --extra-search-dir=/path/to/other/dists ENV
+
+.. envvar:: VIRTUAL_ENV_DISABLE_PROMPT
+
+ Any virtualenv created when this is set to a non-empty value will not have
+   its :ref:`activate` script modify the shell prompt.
+
+
+Configuration File
+~~~~~~~~~~~~~~~~~~
+
+virtualenv also looks for a standard ini config file. On Unix and Mac OS X
+that's ``$HOME/.virtualenv/virtualenv.ini`` and on Windows, it's
+``%APPDATA%\virtualenv\virtualenv.ini``.
+
+The names of the settings are derived from the long command line option,
+e.g. the option :option:`--python <-p>` would look like this::
+
+ [virtualenv]
+ python = /opt/python-3.3/bin/python
+
+Appending options like :option:`--extra-search-dir` can be written on multiple
+lines::
+
+ [virtualenv]
+ extra-search-dir =
+ /path/to/dists
+ /path/to/other/dists
+
+Please have a look at the output of :option:`--help <-h>` for a full list
+of supported options.
+
+
+Extending Virtualenv
+--------------------
+
+
+Creating Your Own Bootstrap Scripts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While this creates an environment, it doesn't put anything into the
+environment. Developers may find it useful to distribute a script
+that sets up a particular environment, for example a script that
+installs a particular web application.
+
+To create a script like this, call
+:py:func:`virtualenv.create_bootstrap_script`, and write the
+result to your new bootstrapping script.
+
+.. py:function:: create_bootstrap_script(extra_text)
+
+ Creates a bootstrap script from ``extra_text``, which is like
+ this script but with extend_parser, adjust_options, and after_install hooks.
+
+This returns a string that (written to disk of course) can be used
+as a bootstrap script with your own customizations. The script
+will be the standard virtualenv.py script, with your extra text
+added (your extra text should be Python code).
+
+If you include these functions, they will be called:
+
+.. py:function:: extend_parser(optparse_parser)
+
+ You can add or remove options from the parser here.
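+
+   For example, a sketch (``--blog-name`` is a hypothetical option)::
+
+       def extend_parser(optparse_parser):
+           optparse_parser.add_option('--blog-name', dest='blog_name',
+                                      help='name of the blog to set up')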
+
+.. py:function:: adjust_options(options, args)
+
+ You can change options here, or change the args (if you accept
+ different kinds of arguments, be sure you modify ``args`` so it is
+ only ``[DEST_DIR]``).
+
+.. py:function:: after_install(options, home_dir)
+
+ After everything is installed, this function is called. This
+ is probably the function you are most likely to use. An
+ example would be::
+
+ def after_install(options, home_dir):
+ if sys.platform == 'win32':
+ bin = 'Scripts'
+ else:
+ bin = 'bin'
+ subprocess.call([join(home_dir, bin, 'easy_install'),
+ 'MyPackage'])
+ subprocess.call([join(home_dir, bin, 'my-package-script'),
+ 'setup', home_dir])
+
+ This example immediately installs a package, and runs a setup
+ script from that package.
+
+Bootstrap Example
+~~~~~~~~~~~~~~~~~
+
+Here's a more concrete example of how you could use this::
+
+ import virtualenv, textwrap
+ output = virtualenv.create_bootstrap_script(textwrap.dedent("""
+ import os, subprocess
+ def after_install(options, home_dir):
+ etc = join(home_dir, 'etc')
+ if not os.path.exists(etc):
+ os.makedirs(etc)
+ subprocess.call([join(home_dir, 'bin', 'easy_install'),
+ 'BlogApplication'])
+ subprocess.call([join(home_dir, 'bin', 'paster'),
+ 'make-config', 'BlogApplication',
+ join(etc, 'blog.ini')])
+ subprocess.call([join(home_dir, 'bin', 'paster'),
+ 'setup-app', join(etc, 'blog.ini')])
+ """))
+    with open('blog-bootstrap.py', 'w') as f:
+        f.write(output)
+
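+The generated script is then invoked like ``virtualenv`` itself; a sketch,
+assuming a target directory ``ENV``::
+
+    $ python blog-bootstrap.py ENV
+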
+Another example is available `here`__.
+
+.. __: https://github.com/socialplanning/fassembler/blob/master/fassembler/create-venv-script.py
diff --git a/testing/mozharness/external_tools/virtualenv/docs/userguide.rst b/testing/mozharness/external_tools/virtualenv/docs/userguide.rst
new file mode 100644
index 000000000..35f0dc950
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/docs/userguide.rst
@@ -0,0 +1,258 @@
+User Guide
+==========
+
+
+Usage
+-----
+
+Virtualenv has one basic command::
+
+ $ virtualenv ENV
+
+Where ``ENV`` is a directory to place the new virtual environment. It has
+a number of usual effects (modifiable by many :ref:`options`):
+
+ - :file:`ENV/lib/` and :file:`ENV/include/` are created, containing supporting
+ library files for a new virtualenv python. Packages installed in this
+ environment will live under :file:`ENV/lib/pythonX.X/site-packages/`.
+
+  - :file:`ENV/bin` is created, where executables live - notably a new
+ :command:`python`. Thus running a script with ``#! /path/to/ENV/bin/python``
+ would run that script under this virtualenv's python.
+
+ - The crucial packages pip_ and setuptools_ are installed, which allow other
+ packages to be easily installed to the environment. This associated pip
+ can be run from :file:`ENV/bin/pip`.
+
+The python in your new virtualenv is effectively isolated from the python that
+was used to create it.
+
+.. _pip: https://pypi.python.org/pypi/pip
+.. _setuptools: https://pypi.python.org/pypi/setuptools
+
+
+.. _activate:
+
+activate script
+~~~~~~~~~~~~~~~
+
+In a newly created virtualenv there will also be a :command:`activate` shell
+script. For Windows systems, activation scripts are provided for
+the Command Prompt and Powershell.
+
+On Posix systems, this resides in :file:`/ENV/bin/`, so you can run::
+
+ $ source bin/activate
+
+For some shells (e.g. the original Bourne Shell) you may need to use the
+:command:`.` command, when :command:`source` does not exist. There are also
+separate activate files for some other shells, like csh and fish.
+:file:`bin/activate` should work for bash/zsh/dash.
+
+This will change your ``$PATH`` so its first entry is the virtualenv's
+``bin/`` directory. (You have to use ``source`` because it changes your
+shell environment in-place.) This is all it does; it's purely a
+convenience. If you directly run a script or the python interpreter
+from the virtualenv's ``bin/`` directory (e.g. ``path/to/ENV/bin/pip``
+or ``/path/to/ENV/bin/python-script.py``) there's no need for
+activation.
+
+The ``activate`` script will also modify your shell prompt to indicate
+which environment is currently active. To disable this behaviour, see
+:envvar:`VIRTUAL_ENV_DISABLE_PROMPT`.
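+
+For example (a sketch)::
+
+    $ export VIRTUAL_ENV_DISABLE_PROMPT=1
+    $ source ENV/bin/activate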
+
+To undo these changes to your path (and prompt), just run::
+
+ $ deactivate
+
+On Windows, the equivalent `activate` script is in the ``Scripts`` folder::
+
+ > \path\to\env\Scripts\activate
+
+And type ``deactivate`` to undo the changes.
+
+Based on your active shell (CMD.exe or Powershell.exe), Windows will use
+either activate.bat or activate.ps1 (as appropriate) to activate the
+virtual environment. If using Powershell, see the notes about code signing
+below.
+
+.. note::
+
+ If using Powershell, the ``activate`` script is subject to the
+ `execution policies`_ on the system. By default on Windows 7, the system's
+   execution policy is set to ``Restricted``, meaning no scripts like the
+   ``activate`` script are allowed to be executed. Fortunately, that policy
+   can be relaxed slightly to allow the ``activate`` script to be executed.
+
+ In order to use the script, you can relax your system's execution
+ policy to ``AllSigned``, meaning all scripts on the system must be
+ digitally signed to be executed. Since the virtualenv activation
+ script is signed by one of the authors (Jannis Leidel) this level of
+ the execution policy suffices. As an administrator run::
+
+ PS C:\> Set-ExecutionPolicy AllSigned
+
+ Then you'll be asked to trust the signer, when executing the script.
+ You will be prompted with the following::
+
+ PS C:\> virtualenv .\foo
+ New python executable in C:\foo\Scripts\python.exe
+ Installing setuptools................done.
+ Installing pip...................done.
+ PS C:\> .\foo\scripts\activate
+
+ Do you want to run software from this untrusted publisher?
+ File C:\foo\scripts\activate.ps1 is published by E=jannis@leidel.info,
+ CN=Jannis Leidel, L=Berlin, S=Berlin, C=DE, Description=581796-Gh7xfJxkxQSIO4E0
+ and is not trusted on your system. Only run scripts from trusted publishers.
+ [V] Never run [D] Do not run [R] Run once [A] Always run [?] Help
+ (default is "D"):A
+ (foo) PS C:\>
+
+ If you select ``[A] Always Run``, the certificate will be added to the
+ Trusted Publishers of your user account, and will be trusted in this
+ user's context henceforth. If you select ``[R] Run Once``, the script will
+   be run, but you will be prompted again on a subsequent invocation. Advanced users
+ can add the signer's certificate to the Trusted Publishers of the Computer
+ account to apply to all users (though this technique is out of scope of this
+ document).
+
+ Alternatively, you may relax the system execution policy to allow running
+ of local scripts without verifying the code signature using the following::
+
+ PS C:\> Set-ExecutionPolicy RemoteSigned
+
+ Since the ``activate.ps1`` script is generated locally for each virtualenv,
+ it is not considered a remote script and can then be executed.
+
+.. _`execution policies`: http://technet.microsoft.com/en-us/library/dd347641.aspx
+
+Removing an Environment
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Removing a virtual environment is simply done by deactivating it and deleting the
+environment folder with all its contents::
+
+ (ENV)$ deactivate
+ $ rm -r /path/to/ENV
+
+The :option:`--system-site-packages` Option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you build with ``virtualenv --system-site-packages ENV``, your virtual
+environment will inherit packages from ``/usr/lib/python2.7/site-packages``
+(or wherever your global site-packages directory is).
+
+This can be used if you have control over the global site-packages directory,
+and you want to depend on the packages there. If you want isolation from the
+global system, do not use this flag.
+
+Windows Notes
+~~~~~~~~~~~~~
+
+Some paths within the virtualenv are slightly different on Windows: scripts and
+executables on Windows go in ``ENV\Scripts\`` instead of ``ENV/bin/`` and
+libraries go in ``ENV\Lib\`` rather than ``ENV/lib/``.
+
+To create a virtualenv under a path with spaces in it on Windows, you'll need
+the `win32api <http://sourceforge.net/projects/pywin32/>`_ library installed.
+
+
+Using Virtualenv without ``bin/python``
+---------------------------------------
+
+Sometimes you can't or don't want to use the Python interpreter
+created by the virtualenv. For instance, in a `mod_python
+<http://www.modpython.org/>`_ or `mod_wsgi <http://www.modwsgi.org/>`_
+environment, there is only one interpreter.
+
+Luckily, it's easy. You must use the custom Python interpreter to
+*install* libraries. But to *use* libraries, you just have to be sure
+the path is correct. A script is available to correct the path. You
+can set up the environment like::
+
+ activate_this = '/path/to/env/bin/activate_this.py'
+ execfile(activate_this, dict(__file__=activate_this))
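+
+``execfile`` only exists on Python 2; on Python 3, an equivalent sketch is::
+
+    activate_this = '/path/to/env/bin/activate_this.py'
+    with open(activate_this) as f:
+        exec(f.read(), dict(__file__=activate_this))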
+
+This will change ``sys.path`` and even change ``sys.prefix``, but also allow
+you to use an existing interpreter. Items in your environment will show up
+first on ``sys.path``, before global items. However, global items will
+always be accessible (as if the :option:`--system-site-packages` flag had been
+used in creating the environment, whether it was or not). Also, this cannot undo
+the activation of other environments, or modules that have been imported.
+You shouldn't try to, for instance, activate an environment before a web
+request; you should activate *one* environment as early as possible, and not
+do it again in that process.
+
+Making Environments Relocatable
+-------------------------------
+
+**Note:** this option is somewhat experimental, and there are probably
+caveats that have not yet been identified.
+
+.. warning::
+
+ The ``--relocatable`` option currently has a number of issues,
+ and is not guaranteed to work in all circumstances. It is possible
+ that the option will be deprecated in a future version of ``virtualenv``.
+
+Normally environments are tied to a specific path. That means that
+you cannot move an environment around or copy it to another computer.
+You can fix up an environment to make it relocatable with the
+command::
+
+ $ virtualenv --relocatable ENV
+
+This will make some of the files created by setuptools use relative paths,
+and will change all the scripts to use ``activate_this.py`` instead of using
+the location of the Python interpreter to select the environment.
+
+**Note:** scripts which have been made relocatable will only work if
+the virtualenv is activated, specifically the python executable from
+the virtualenv must be the first one on the system PATH. Also note that
+the activate scripts are not currently made relocatable by
+``virtualenv --relocatable``.
+
+**Note:** you must run this after you've installed *any* packages into
+the environment. If you make an environment relocatable, then
+install a new package, you must run ``virtualenv --relocatable``
+again.
+
+Also, this **does not make your packages cross-platform**. You can
+move the directory around, but it can only be used on other similar
+computers. Some known environmental differences that can cause
+incompatibilities:
+
+- a different version of Python;
+- one platform using UCS2 for its internal unicode representation and
+  another using UCS4 (a compile-time option);
+- obvious platform changes, like Windows vs. Linux or Intel vs. ARM;
+- libraries that bind to C libraries on the system, when those C
+  libraries are located somewhere different (either different versions,
+  or a different filesystem layout).
+
+If you use this flag to create an environment, currently, the
+:option:`--system-site-packages` option will be implied.
+
+The :option:`--extra-search-dir` option
+---------------------------------------
+
+This option allows you to provide your own versions of setuptools and/or
+pip to use instead of the embedded versions that come with virtualenv.
+
+To use this feature, pass one or more ``--extra-search-dir`` options to
+virtualenv like this::
+
+ $ virtualenv --extra-search-dir=/path/to/distributions ENV
+
+The ``/path/to/distributions`` path should point to a directory that contains
+setuptools and/or pip wheels.
+
+virtualenv will look for wheels in the specified directories, but will use
+pip's standard algorithm for selecting the wheel to install, which looks for
+the latest compatible wheel.
+
+As well as the extra directories, the search order includes:
+
+#. The ``virtualenv_support`` directory relative to virtualenv.py
+#. The directory where virtualenv.py is located.
+#. The current directory.
+
diff --git a/testing/mozharness/external_tools/virtualenv/scripts/virtualenv b/testing/mozharness/external_tools/virtualenv/scripts/virtualenv
new file mode 100644
index 000000000..c961dd7db
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/scripts/virtualenv
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+import virtualenv
+virtualenv.main()
diff --git a/testing/mozharness/external_tools/virtualenv/setup.cfg b/testing/mozharness/external_tools/virtualenv/setup.cfg
new file mode 100644
index 000000000..6662fa569
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/setup.cfg
@@ -0,0 +1,8 @@
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_date = 0
+tag_build =
+tag_svn_revision = 0
+
diff --git a/testing/mozharness/external_tools/virtualenv/setup.py b/testing/mozharness/external_tools/virtualenv/setup.py
new file mode 100644
index 000000000..ee03bc531
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/setup.py
@@ -0,0 +1,123 @@
+import os
+import re
+import shutil
+import sys
+
+if sys.version_info[:2] < (2, 6):
+ sys.exit('virtualenv requires Python 2.6 or higher.')
+
+try:
+ from setuptools import setup
+ from setuptools.command.test import test as TestCommand
+
+ class PyTest(TestCommand):
+ user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
+
+ def initialize_options(self):
+ TestCommand.initialize_options(self)
+ self.pytest_args = []
+
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ #self.test_args = []
+ #self.test_suite = True
+
+ def run_tests(self):
+ # import here, because outside the eggs aren't loaded
+ import pytest
+ sys.exit(pytest.main(self.pytest_args))
+
+ setup_params = {
+ 'entry_points': {
+ 'console_scripts': ['virtualenv=virtualenv:main'],
+ },
+ 'zip_safe': False,
+ 'cmdclass': {'test': PyTest},
+ 'tests_require': ['pytest', 'mock'],
+ }
+except ImportError:
+ from distutils.core import setup
+ if sys.platform == 'win32':
+ print('Note: without Setuptools installed you will '
+ 'have to use "python -m virtualenv ENV"')
+ setup_params = {}
+ else:
+ script = 'scripts/virtualenv'
+ setup_params = {'scripts': [script]}
+
+
+def read_file(*paths):
+ here = os.path.dirname(os.path.abspath(__file__))
+ with open(os.path.join(here, *paths)) as f:
+ return f.read()
+
+# Get long_description from index.rst:
+long_description = read_file('docs', 'index.rst')
+long_description = long_description.strip().split('split here', 1)[0]
+# Add release history
+changes = read_file('docs', 'changes.rst')
+# Only report last two releases for brevity
+releases_found = 0
+change_lines = []
+for line in changes.splitlines():
+ change_lines.append(line)
+ if line.startswith('--------------'):
+ releases_found += 1
+ if releases_found > 2:
+ break
+
+changes = '\n'.join(change_lines[:-2]) + '\n'
+changes += '`Full Changelog <https://virtualenv.pypa.io/en/latest/changes.html>`_.'
+# Replace issue/pull directives
+changes = re.sub(r':pull:`(\d+)`', r'PR #\1', changes)
+changes = re.sub(r':issue:`(\d+)`', r'#\1', changes)
+
+long_description += '\n\n' + changes
+
+
+def get_version():
+ version_file = read_file('virtualenv.py')
+ version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
+ version_file, re.M)
+ if version_match:
+ return version_match.group(1)
+ raise RuntimeError("Unable to find version string.")
+
+
+# Hack to prevent a TypeError: 'NoneType' object is not callable error in
+# multiprocessing/util.py's _exit_function on exit of `python setup.py test`
+# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
+try:
+ import multiprocessing # noqa
+except ImportError:
+ pass
+
+setup(
+ name='virtualenv',
+ version=get_version(),
+ description="Virtual Python Environment builder",
+ long_description=long_description,
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ ],
+ keywords='setuptools deployment installation distutils',
+ author='Ian Bicking',
+ author_email='ianb@colorstudy.com',
+ maintainer='Jannis Leidel, Carl Meyer and Brian Rosner',
+ maintainer_email='python-virtualenv@groups.google.com',
+ url='https://virtualenv.pypa.io/',
+ license='MIT',
+ py_modules=['virtualenv'],
+ packages=['virtualenv_support'],
+ package_data={'virtualenv_support': ['*.whl']},
+ **setup_params)
diff --git a/testing/mozharness/external_tools/virtualenv/site.py b/testing/mozharness/external_tools/virtualenv/site.py
new file mode 100644
index 000000000..4e426cdb6
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/site.py
@@ -0,0 +1,760 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code. Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path. On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages. On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely). The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path. Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once. Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth. Assume foo.pth contains the
+following:
+
+ # foo package configuration
+ foo
+ bar
+ bletch
+
+and bar.pth contains:
+
+ # bar package configuration
+ bar
+
+Then the following directories are added to sys.path, in this order:
+
+ /usr/local/lib/python2.X/site-packages/bar
+ /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations. If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+
+try:
+ import __builtin__ as builtins
+except ImportError:
+ import builtins
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
+_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+ ModuleType = type(os)
+
+def makepath(*paths):
+ dir = os.path.join(*paths)
+ if _is_jython and (dir == '__classpath__' or
+ dir.startswith('__pyclasspath__')):
+ return dir, dir
+ dir = os.path.abspath(dir)
+ return dir, os.path.normcase(dir)
+
+def abs__file__():
+ """Set all module' __file__ attribute to an absolute path"""
+ for m in sys.modules.values():
+ if ((_is_jython and not isinstance(m, ModuleType)) or
+ hasattr(m, '__loader__')):
+ # only modules need the abspath in Jython. and don't mess
+ # with a PEP 302-supplied __file__
+ continue
+ f = getattr(m, '__file__', None)
+ if f is None:
+ continue
+ m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+ """ Remove duplicate entries from sys.path along with making them
+ absolute"""
+ # This ensures that the initial path provided by the interpreter contains
+ # only absolute pathnames, even if we're running from the build directory.
+ L = []
+ known_paths = set()
+ for dir in sys.path:
+ # Filter out duplicate paths (on case-insensitive file systems also
+ # if they only differ in case); turn relative paths into absolute
+ # paths.
+ dir, dircase = makepath(dir)
+ if not dircase in known_paths:
+ L.append(dir)
+ known_paths.add(dircase)
+ sys.path[:] = L
+ return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python. See http://www.python.org/sf/586680
+def addbuilddir():
+ """Append ./build/lib.<platform> in case we're running in the build dir
+ (especially for Guido :-)"""
+ from distutils.util import get_platform
+ s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+ if hasattr(sys, 'gettotalrefcount'):
+ s += '-pydebug'
+ s = os.path.join(os.path.dirname(sys.path[-1]), s)
+ sys.path.append(s)
+
+def _init_pathinfo():
+ """Return a set containing all existing directory entries from sys.path"""
+ d = set()
+ for dir in sys.path:
+ try:
+ if os.path.isdir(dir):
+ dir, dircase = makepath(dir)
+ d.add(dircase)
+ except TypeError:
+ continue
+ return d
+
+def addpackage(sitedir, name, known_paths):
+ """Add a new path to known_paths by combining sitedir and 'name' or execute
+ sitedir if it starts with 'import'"""
+ if known_paths is None:
+ _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ fullname = os.path.join(sitedir, name)
+ try:
+ f = open(fullname, "rU")
+ except IOError:
+ return
+ try:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ if line.startswith("import"):
+ exec(line)
+ continue
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if not dircase in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ finally:
+ f.close()
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+ """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+ 'sitedir'"""
+ if known_paths is None:
+ known_paths = _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ sitedir, sitedircase = makepath(sitedir)
+ if not sitedircase in known_paths:
+ sys.path.append(sitedir) # Add path component
+ try:
+ names = os.listdir(sitedir)
+ except os.error:
+ return
+ names.sort()
+ for name in names:
+ if name.endswith(os.extsep + "pth"):
+ addpackage(sitedir, name, known_paths)
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+ """Add site-packages (and possibly site-python) to sys.path"""
+ prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+ if exec_prefix != sys_prefix:
+ prefixes.append(os.path.join(exec_prefix, "local"))
+
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos') or _is_jython:
+ sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+ elif _is_pypy:
+ sitedirs = [os.path.join(prefix, 'site-packages')]
+ elif sys.platform == 'darwin' and prefix == sys_prefix:
+
+ if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
+
+ sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+ os.path.join(prefix, "Extras", "lib", "python")]
+
+ else: # any other Python distros on OSX work this way
+ sitedirs = [os.path.join(prefix, "lib",
+ "python" + sys.version[:3], "site-packages")]
+
+ elif os.sep == '/':
+ sitedirs = [os.path.join(prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages"),
+ os.path.join(prefix, "lib", "site-python"),
+ os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+ lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+ if (os.path.exists(lib64_dir) and
+ os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+ if _is_64bit:
+ sitedirs.insert(0, lib64_dir)
+ else:
+ sitedirs.append(lib64_dir)
+ try:
+ # sys.getobjects only available in --with-pydebug build
+ sys.getobjects
+ sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+ except AttributeError:
+ pass
+ # Debian-specific dist-packages directories:
+ sitedirs.append(os.path.join(prefix, "local/lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ if sys.version[0] == '2':
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ else:
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[0],
+ "dist-packages"))
+ sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+ else:
+ sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ sitedirs.append(
+ os.path.join(home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages'))
+ for sitedir in sitedirs:
+ if os.path.isdir(sitedir):
+ addsitedir(sitedir, known_paths)
+ return None
+
+def check_enableusersite():
+ """Check if user site directory is safe for inclusion
+
+ The function tests for the command line flag (including environment var),
+ process uid/gid equal to effective uid/gid.
+
+ None: Disabled for security reasons
+ False: Disabled by user (command line option)
+ True: Safe and enabled
+ """
+ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+ return False
+
+ if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+ # check process uid == effective uid
+ if os.geteuid() != os.getuid():
+ return None
+ if hasattr(os, "getgid") and hasattr(os, "getegid"):
+ # check process gid == effective gid
+ if os.getegid() != os.getgid():
+ return None
+
+ return True
+
+def addusersitepackages(known_paths):
+ """Add a per user site-package to sys.path
+
+ Each user has their own Python directory with site-packages in the
+ home directory.
+
+ USER_BASE is the root directory for all Python versions
+
+ USER_SITE is the user specific site-packages directory
+
+ USER_SITE/.. can be used for data.
+ """
+ global USER_BASE, USER_SITE, ENABLE_USER_SITE
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ #if sys.platform in ('os2emx', 'riscos'):
+ # # Don't know what to put here
+ # USER_BASE = ''
+ # USER_SITE = ''
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser(base, "Python")
+ USER_SITE = os.path.join(USER_BASE,
+ "Python" + sys.version[0] + sys.version[2],
+ "site-packages")
+ else:
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser("~", ".local")
+ USER_SITE = os.path.join(USER_BASE, "lib",
+ "python" + sys.version[:3],
+ "site-packages")
+
+ if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+ addsitedir(USER_SITE, known_paths)
+ if ENABLE_USER_SITE:
+ for dist_libdir in ("lib", "local/lib"):
+ user_site = os.path.join(USER_BASE, dist_libdir,
+ "python" + sys.version[:3],
+ "dist-packages")
+ if os.path.isdir(user_site):
+ addsitedir(user_site, known_paths)
+ return known_paths
+
+
+
+def setBEGINLIBPATH():
+ """The OS/2 EMX port has optional extension modules that do double duty
+ as DLLs (and must use the .DLL file extension) for other extensions.
+ The library search path needs to be amended so these will be found
+ during module import. Use BEGINLIBPATH so that these are at the start
+ of the library search path.
+
+ """
+ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+ libpath = os.environ['BEGINLIBPATH'].split(';')
+ if libpath[-1]:
+ libpath.append(dllpath)
+ else:
+ libpath[-1] = dllpath
+ os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+ """Define new built-ins 'quit' and 'exit'.
+ These are objects that display a hint on how to exit when shown, and exit the interpreter when called.
+
+ """
+ if os.sep == ':':
+ eof = 'Cmd-Q'
+ elif os.sep == '\\':
+ eof = 'Ctrl-Z plus Return'
+ else:
+ eof = 'Ctrl-D (i.e. EOF)'
+
+ class Quitter(object):
+ def __init__(self, name):
+ self.name = name
+ def __repr__(self):
+ return 'Use %s() or %s to exit' % (self.name, eof)
+ def __call__(self, code=None):
+ # Shells like IDLE catch the SystemExit, but listen when their
+ # stdin wrapper is closed.
+ try:
+ sys.stdin.close()
+ except:
+ pass
+ raise SystemExit(code)
+ builtins.quit = Quitter('quit')
+ builtins.exit = Quitter('exit')
+
+
+class _Printer(object):
+ """interactive prompt objects for printing the license text, a list of
+ contributors and the copyright notice."""
+
+ MAXLINES = 23
+
+ def __init__(self, name, data, files=(), dirs=()):
+ self.__name = name
+ self.__data = data
+ self.__files = files
+ self.__dirs = dirs
+ self.__lines = None
+
+ def __setup(self):
+ if self.__lines:
+ return
+ data = None
+ for dir in self.__dirs:
+ for filename in self.__files:
+ filename = os.path.join(dir, filename)
+ try:
+ fp = open(filename, "rU")
+ data = fp.read()
+ fp.close()
+ break
+ except IOError:
+ pass
+ if data:
+ break
+ if not data:
+ data = self.__data
+ self.__lines = data.split('\n')
+ self.__linecnt = len(self.__lines)
+
+ def __repr__(self):
+ self.__setup()
+ if len(self.__lines) <= self.MAXLINES:
+ return "\n".join(self.__lines)
+ else:
+ return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+ def __call__(self):
+ self.__setup()
+ prompt = 'Hit Return for more, or q (and Return) to quit: '
+ lineno = 0
+ while 1:
+ try:
+ for i in range(lineno, lineno + self.MAXLINES):
+ print(self.__lines[i])
+ except IndexError:
+ break
+ else:
+ lineno += self.MAXLINES
+ key = None
+ while key is None:
+ try:
+ key = raw_input(prompt)
+ except NameError:
+ key = input(prompt)
+ if key not in ('', 'q'):
+ key = None
+ if key == 'q':
+ break
+
+def setcopyright():
+ """Set 'copyright' and 'credits' in __builtin__"""
+ builtins.copyright = _Printer("copyright", sys.copyright)
+ if _is_jython:
+ builtins.credits = _Printer(
+ "credits",
+ "Jython is maintained by the Jython developers (www.jython.org).")
+ elif _is_pypy:
+ builtins.credits = _Printer(
+ "credits",
+ "PyPy is maintained by the PyPy developers: http://pypy.org/")
+ else:
+ builtins.credits = _Printer("credits", """\
+ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+ for supporting Python development. See www.python.org for more information.""")
+ here = os.path.dirname(os.__file__)
+ builtins.license = _Printer(
+ "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+ ["LICENSE.txt", "LICENSE"],
+ [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+ """Define the built-in 'help'.
+ This is a wrapper around pydoc.help (with a twist).
+
+ """
+
+ def __repr__(self):
+ return "Type help() for interactive help, " \
+ "or help(object) for help about object."
+ def __call__(self, *args, **kwds):
+ import pydoc
+ return pydoc.help(*args, **kwds)
+
+def sethelper():
+ builtins.help = _Helper()
+
+def aliasmbcs():
+ """On Windows, some default encodings are not provided by Python,
+ while they are always available as "mbcs" in each locale. Make
+ them usable by aliasing to "mbcs" in such a case."""
+ if sys.platform == 'win32':
+ import locale, codecs
+ enc = locale.getdefaultlocale()[1]
+ if enc.startswith('cp'): # "cp***" ?
+ try:
+ codecs.lookup(enc)
+ except LookupError:
+ import encodings
+ encodings._cache[enc] = encodings._unknown
+ encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+ """Set the string encoding used by the Unicode implementation. The
+ default is 'ascii', but if you're willing to experiment, you can
+ change this."""
+ encoding = "ascii" # Default value set by _PyUnicode_Init()
+ if 0:
+ # Enable to support locale aware default string encodings.
+ import locale
+ loc = locale.getdefaultlocale()
+ if loc[1]:
+ encoding = loc[1]
+ if 0:
+ # Enable to switch off string to Unicode coercion and implicit
+ # Unicode to string conversion.
+ encoding = "undefined"
+ if encoding != "ascii":
+ # On Non-Unicode builds this will raise an AttributeError...
+ sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+ """Run custom site specific code, if available."""
+ try:
+ import sitecustomize
+ except ImportError:
+ pass
+
+def virtual_install_main_packages():
+ f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+ sys.real_prefix = f.read().strip()
+ f.close()
+ pos = 2
+ hardcoded_relative_dirs = []
+ if sys.path[0] == '':
+ pos += 1
+ if _is_jython:
+ paths = [os.path.join(sys.real_prefix, 'Lib')]
+ elif _is_pypy:
+ if sys.version_info > (3, 2):
+ cpyver = '%d' % sys.version_info[0]
+ elif sys.pypy_version_info >= (1, 5):
+ cpyver = '%d.%d' % sys.version_info[:2]
+ else:
+ cpyver = '%d.%d.%d' % sys.version_info[:3]
+ paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+ os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+ if sys.pypy_version_info < (1, 9):
+ paths.insert(1, os.path.join(sys.real_prefix,
+ 'lib-python', 'modified-%s' % cpyver))
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ #
+ # This is hardcoded in the Python executable, but relative to sys.prefix:
+ for path in paths[:]:
+ plat_path = os.path.join(path, 'plat-%s' % sys.platform)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ # MOZ: The MSYS2 and MinGW versions of Python have their main packages in the UNIX directory; this check is specifically for the native win32 Python
+ elif sys.platform == 'win32' and os.sep == '\\':
+ paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+ else:
+ paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+ if os.path.exists(lib64_path):
+ if _is_64bit:
+ paths.insert(0, lib64_path)
+ else:
+ paths.append(lib64_path)
+ # This is hardcoded in the Python executable, but relative to
+ # sys.prefix. Debian change: we need to add the multiarch triplet
+ # here, which is where the real stuff lives. As per PEP 421, in
+ # Python 3.3+, this lives in sys.implementation, while in Python 2.7
+ # it lives in sys.
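+ # (For illustration: on 64-bit Debian/Ubuntu the triplet is 'x86_64-linux-gnu'.)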
+ try:
+ arch = getattr(sys, 'implementation', sys)._multiarch
+ except AttributeError:
+ # This is a non-multiarch aware Python. Fallback to the old way.
+ arch = sys.platform
+ plat_path = os.path.join(sys.real_prefix, 'lib',
+ 'python'+sys.version[:3],
+ 'plat-%s' % arch)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ # This is hardcoded in the Python executable, but
+ # relative to sys.prefix, so we have to fix up:
+ for path in list(paths):
+ tk_dir = os.path.join(path, 'lib-tk')
+ if os.path.exists(tk_dir):
+ paths.append(tk_dir)
+
+ # These are hardcoded in the Apple's Python executable,
+ # but relative to sys.prefix, so we have to fix them up:
+ if sys.platform == 'darwin':
+ hardcoded_paths = [os.path.join(relative_dir, module)
+ for relative_dir in hardcoded_relative_dirs
+ for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+ for path in hardcoded_paths:
+ if os.path.exists(path):
+ paths.append(path)
+
+ sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+ """
+ Force easy_installed eggs in the global environment to get placed
+ in sys.path after all packages inside the virtualenv. This
+ maintains the "least surprise" result that packages in the
+ virtualenv always mask global packages, never the other way
+ around.
+
+ """
+ egginsert = getattr(sys, '__egginsert', 0)
+ for i, path in enumerate(sys.path):
+ if i > egginsert and path.startswith(sys.prefix):
+ egginsert = i
+ sys.__egginsert = egginsert + 1
+
+def virtual_addsitepackages(known_paths):
+ force_global_eggs_after_local_site_packages()
+ return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+ """Adjust the special classpath sys.path entries for Jython. These
+ entries should follow the base virtualenv lib directories.
+ """
+ paths = []
+ classpaths = []
+ for path in sys.path:
+ if path == '__classpath__' or path.startswith('__pyclasspath__'):
+ classpaths.append(path)
+ else:
+ paths.append(path)
+ sys.path = paths
+ sys.path.extend(classpaths)
+
+def execusercustomize():
+ """Run custom user specific code, if available."""
+ try:
+ import usercustomize
+ except ImportError:
+ pass
+
+
+def main():
+ global ENABLE_USER_SITE
+ virtual_install_main_packages()
+ abs__file__()
+ paths_in_sys = removeduppaths()
+ if (os.name == "posix" and sys.path and
+ os.path.basename(sys.path[-1]) == "Modules"):
+ addbuilddir()
+ if _is_jython:
+ fixclasspath()
+ GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+ if not GLOBAL_SITE_PACKAGES:
+ ENABLE_USER_SITE = False
+ if ENABLE_USER_SITE is None:
+ ENABLE_USER_SITE = check_enableusersite()
+ paths_in_sys = addsitepackages(paths_in_sys)
+ paths_in_sys = addusersitepackages(paths_in_sys)
+ if GLOBAL_SITE_PACKAGES:
+ paths_in_sys = virtual_addsitepackages(paths_in_sys)
+ if sys.platform == 'os2emx':
+ setBEGINLIBPATH()
+ setquit()
+ setcopyright()
+ sethelper()
+ aliasmbcs()
+ setencoding()
+ execsitecustomize()
+ if ENABLE_USER_SITE:
+ execusercustomize()
+ # Remove sys.setdefaultencoding() so that users cannot change the
+ # encoding after initialization. The test for presence is needed when
+ # this module is run as a script, because this code is executed twice.
+ if hasattr(sys, "setdefaultencoding"):
+ del sys.setdefaultencoding
+
+main()
+
+def _script():
+ help = """\
+ %s [--user-base] [--user-site]
+
+ Without arguments print some useful information
+ With arguments print the value of USER_BASE and/or USER_SITE separated
+ by '%s'.
+
+ Exit codes with --user-base or --user-site:
+ 0 - user site directory is enabled
+ 1 - user site directory is disabled by user
+ 2 - user site directory is disabled by super user
+ or for security reasons
+ >2 - unknown error
+ """
+ args = sys.argv[1:]
+ if not args:
+ print("sys.path = [")
+ for dir in sys.path:
+ print(" %r," % (dir,))
+ print("]")
+ def exists(path):
+ if os.path.isdir(path):
+ return "exists"
+ else:
+ return "doesn't exist"
+ print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
+ print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)))
+ print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)
+ sys.exit(0)
+
+ buffer = []
+ if '--user-base' in args:
+ buffer.append(USER_BASE)
+ if '--user-site' in args:
+ buffer.append(USER_SITE)
+
+ if buffer:
+ print(os.pathsep.join(buffer))
+ if ENABLE_USER_SITE:
+ sys.exit(0)
+ elif ENABLE_USER_SITE is False:
+ sys.exit(1)
+ elif ENABLE_USER_SITE is None:
+ sys.exit(2)
+ else:
+ sys.exit(3)
+ else:
+ import textwrap
+ print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+ sys.exit(10)
+
+if __name__ == '__main__':
+ _script()
diff --git a/testing/mozharness/external_tools/virtualenv/tests/__init__.py b/testing/mozharness/external_tools/virtualenv/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/tests/__init__.py
diff --git a/testing/mozharness/external_tools/virtualenv/tests/test_activate.sh b/testing/mozharness/external_tools/virtualenv/tests/test_activate.sh
new file mode 100755
index 000000000..e27727386
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/tests/test_activate.sh
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+set -u
+
+ROOT="$(dirname $0)/.."
+VIRTUALENV="${ROOT}/virtualenv.py"
+TESTENV="/tmp/test_virtualenv_activate.venv"
+
+rm -rf ${TESTENV}
+
+echo "$0: Creating virtualenv ${TESTENV}..." 1>&2
+
+${VIRTUALENV} ${TESTENV} | tee ${ROOT}/tests/test_activate_output.actual
+if ! diff ${ROOT}/tests/test_activate_output.expected ${ROOT}/tests/test_activate_output.actual; then
+ echo "$0: Failed to get expected output from ${VIRTUALENV}!" 1>&2
+ exit 1
+fi
+
+echo "$0: Created virtualenv ${TESTENV}." 1>&2
+
+echo "$0: Activating ${TESTENV}..." 1>&2
+. ${TESTENV}/bin/activate
+echo "$0: Activated ${TESTENV}." 1>&2
+
+echo "$0: Checking value of \$VIRTUAL_ENV..." 1>&2
+
+if [ "$VIRTUAL_ENV" != "${TESTENV}" ]; then
+ echo "$0: Expected \$VIRTUAL_ENV to be set to \"${TESTENV}\"; actual value: \"${VIRTUAL_ENV}\"!" 1>&2
+ exit 2
+fi
+
+echo "$0: \$VIRTUAL_ENV = \"${VIRTUAL_ENV}\" -- OK." 1>&2
+
+echo "$0: Checking output of \$(which python)..." 1>&2
+
+if [ "$(which python)" != "${TESTENV}/bin/python" ]; then
+ echo "$0: Expected \$(which python) to return \"${TESTENV}/bin/python\"; actual value: \"$(which python)\"!" 1>&2
+ exit 3
+fi
+
+echo "$0: Output of \$(which python) is OK." 1>&2
+
+echo "$0: Checking output of \$(which pip)..." 1>&2
+
+if [ "$(which pip)" != "${TESTENV}/bin/pip" ]; then
+ echo "$0: Expected \$(which pip) to return \"${TESTENV}/bin/pip\"; actual value: \"$(which pip)\"!" 1>&2
+ exit 4
+fi
+
+echo "$0: Output of \$(which pip) is OK." 1>&2
+
+echo "$0: Checking output of \$(which easy_install)..." 1>&2
+
+if [ "$(which easy_install)" != "${TESTENV}/bin/easy_install" ]; then
+ echo "$0: Expected \$(which easy_install) to return \"${TESTENV}/bin/easy_install\"; actual value: \"$(which easy_install)\"!" 1>&2
+ exit 5
+fi
+
+echo "$0: Output of \$(which easy_install) is OK." 1>&2
+
+echo "$0: Executing a simple Python program..." 1>&2
+
+TESTENV=${TESTENV} python <<__END__
+import os, sys
+
+expected_site_packages = os.path.join(os.environ['TESTENV'], 'lib','python%s' % sys.version[:3], 'site-packages')
+site_packages = os.path.join(os.environ['VIRTUAL_ENV'], 'lib', 'python%s' % sys.version[:3], 'site-packages')
+
+assert site_packages == expected_site_packages, 'site_packages did not have expected value; actual value: %r' % site_packages
+
+open(os.path.join(site_packages, 'pydoc_test.py'), 'w').write('"""This is pydoc_test.py"""\n')
+__END__
+
+if [ $? -ne 0 ]; then
+ echo "$0: Python script failed!" 1>&2
+ exit 6
+fi
+
+echo "$0: Execution of a simple Python program -- OK." 1>&2
+
+echo "$0: Testing pydoc..." 1>&2
+
+if ! PAGER=cat pydoc pydoc_test | grep 'This is pydoc_test.py' > /dev/null; then
+ echo "$0: pydoc test failed!" 1>&2
+ exit 7
+fi
+
+echo "$0: pydoc is OK." 1>&2
+
+echo "$0: Deactivating ${TESTENV}..." 1>&2
+deactivate
+echo "$0: Deactivated ${TESTENV}." 1>&2
+echo "$0: OK!" 1>&2
+
+rm -rf ${TESTENV}
+
diff --git a/testing/mozharness/external_tools/virtualenv/tests/test_activate_output.expected b/testing/mozharness/external_tools/virtualenv/tests/test_activate_output.expected
new file mode 100644
index 000000000..d49469feb
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/tests/test_activate_output.expected
@@ -0,0 +1,2 @@
+New python executable in /tmp/test_virtualenv_activate.venv/bin/python
+Installing setuptools, pip, wheel...done.
diff --git a/testing/mozharness/external_tools/virtualenv/tests/test_cmdline.py b/testing/mozharness/external_tools/virtualenv/tests/test_cmdline.py
new file mode 100644
index 000000000..9682ef003
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/tests/test_cmdline.py
@@ -0,0 +1,44 @@
+import sys
+import subprocess
+import virtualenv
+import pytest
+
+VIRTUALENV_SCRIPT = virtualenv.__file__
+
+def test_commandline_basic(tmpdir):
+ """Simple command line usage should work"""
+ subprocess.check_call([
+ sys.executable,
+ VIRTUALENV_SCRIPT,
+ str(tmpdir.join('venv'))
+ ])
+
+def test_commandline_explicit_interp(tmpdir):
+ """Specifying the Python interpreter should work"""
+ subprocess.check_call([
+ sys.executable,
+ VIRTUALENV_SCRIPT,
+ '-p', sys.executable,
+ str(tmpdir.join('venv'))
+ ])
+
+# The registry lookups to support the abbreviated "-p 3.5" form of specifying
+# a Python interpreter on Windows don't seem to work with Python 3.5. The
+# registry layout is not well documented, and it's not clear that the feature
+# is sufficiently widely used to be worth fixing.
+# See https://github.com/pypa/virtualenv/issues/864
+@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info[:2] >= (3,5)")
+def test_commandline_abbrev_interp(tmpdir):
+ """Specifying abbreviated forms of the Python interpreter should work"""
+ if sys.platform == 'win32':
+ fmt = '%s.%s'
+ else:
+ fmt = 'python%s.%s'
+ abbrev = fmt % (sys.version_info[0], sys.version_info[1])
+ subprocess.check_call([
+ sys.executable,
+ VIRTUALENV_SCRIPT,
+ '-p', abbrev,
+ str(tmpdir.join('venv'))
+ ])
+
diff --git a/testing/mozharness/external_tools/virtualenv/tests/test_virtualenv.py b/testing/mozharness/external_tools/virtualenv/tests/test_virtualenv.py
new file mode 100644
index 000000000..756cde936
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/tests/test_virtualenv.py
@@ -0,0 +1,139 @@
+import virtualenv
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+import pytest
+import platform # noqa
+
+from mock import patch, Mock
+
+
+def test_version():
+ """Should have a version string"""
+ assert virtualenv.virtualenv_version, "Should have version"
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_absolute_path(mock_exists):
+ """Should return absolute path if given and exists"""
+ mock_exists.return_value = True
+ virtualenv.is_executable = Mock(return_value=True)
+ test_abs_path = os.path.abspath("/usr/bin/python53")
+
+ exe = virtualenv.resolve_interpreter(test_abs_path)
+
+ assert exe == test_abs_path, "Absolute path should return as is"
+ mock_exists.assert_called_with(test_abs_path)
+ virtualenv.is_executable.assert_called_with(test_abs_path)
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_nonexistent_interpreter(mock_exists):
+ """Should SystemExit with an nonexistent python interpreter path"""
+ mock_exists.return_value = False
+
+ with pytest.raises(SystemExit):
+ virtualenv.resolve_interpreter("/usr/bin/python53")
+
+ mock_exists.assert_called_with("/usr/bin/python53")
+
+
+@patch('os.path.exists')
+def test_resolve_interpreter_with_invalid_interpreter(mock_exists):
+ """Should exit when with absolute path if not exists"""
+ mock_exists.return_value = True
+ virtualenv.is_executable = Mock(return_value=False)
+ invalid = os.path.abspath("/usr/bin/pyt_hon53")
+
+ with pytest.raises(SystemExit):
+ virtualenv.resolve_interpreter(invalid)
+
+ mock_exists.assert_called_with(invalid)
+ virtualenv.is_executable.assert_called_with(invalid)
+
+
+def test_activate_after_future_statements():
+ """Should insert activation line after last future statement"""
+ script = [
+ '#!/usr/bin/env python',
+ 'from __future__ import with_statement',
+ 'from __future__ import print_function',
+ 'print("Hello, world!")'
+ ]
+ assert virtualenv.relative_script(script) == [
+ '#!/usr/bin/env python',
+ 'from __future__ import with_statement',
+ 'from __future__ import print_function',
+ '',
+ "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this",
+ '',
+ 'print("Hello, world!")'
+ ]
+
+
+def test_cop_update_defaults_with_store_false():
+ """store_false options need reverted logic"""
+ class MyConfigOptionParser(virtualenv.ConfigOptionParser):
+ def __init__(self, *args, **kwargs):
+ self.config = virtualenv.ConfigParser.RawConfigParser()
+ self.files = []
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def get_environ_vars(self, prefix='VIRTUALENV_'):
+ yield ("no_site_packages", "1")
+
+ cop = MyConfigOptionParser()
+ cop.add_option(
+ '--no-site-packages',
+ dest='system_site_packages',
+ action='store_false',
+ help="Don't give access to the global site-packages dir to the "
+ "virtual environment (default)")
+
+ defaults = {}
+ cop.update_defaults(defaults)
+ assert defaults == {'system_site_packages': 0}
+
+def test_install_python_bin():
+ """Should create the right python executables and links"""
+ tmp_virtualenv = tempfile.mkdtemp()
+ try:
+ home_dir, lib_dir, inc_dir, bin_dir = \
+ virtualenv.path_locations(tmp_virtualenv)
+ virtualenv.install_python(home_dir, lib_dir, inc_dir, bin_dir, False,
+ False)
+
+ if virtualenv.is_win:
+ required_executables = [ 'python.exe', 'pythonw.exe']
+ else:
+ py_exe_no_version = 'python'
+ py_exe_version_major = 'python%s' % sys.version_info[0]
+ py_exe_version_major_minor = 'python%s.%s' % (
+ sys.version_info[0], sys.version_info[1])
+ required_executables = [ py_exe_no_version, py_exe_version_major,
+ py_exe_version_major_minor ]
+
+ for pth in required_executables:
+ assert os.path.exists(os.path.join(bin_dir, pth)), ("%s should "
+ "exist in bin_dir" % pth)
+ finally:
+ shutil.rmtree(tmp_virtualenv)
+
+
+@pytest.mark.skipif("platform.python_implementation() == 'PyPy'")
+def test_always_copy_option():
+ """Should be no symlinks in directory tree"""
+ tmp_virtualenv = tempfile.mkdtemp()
+ ve_path = os.path.join(tmp_virtualenv, 'venv')
+ try:
+ virtualenv.create_environment(ve_path, symlink=False)
+
+ for root, dirs, files in os.walk(tmp_virtualenv):
+ for f in files + dirs:
+ full_name = os.path.join(root, f)
+ assert not os.path.islink(full_name), "%s should not be a" \
+ " symlink (to %s)" % (full_name, os.readlink(full_name))
+ finally:
+ shutil.rmtree(tmp_virtualenv)
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv.py b/testing/mozharness/external_tools/virtualenv/virtualenv.py
new file mode 100755
index 000000000..e363021cc
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv.py
@@ -0,0 +1,2329 @@
+#!/usr/bin/env python
+"""Create a "virtual" Python installation"""
+
+import os
+import sys
+
+# If we are running in a new interpreter to create a virtualenv,
+# we do NOT want paths from our existing location interfering with anything,
+# so we remove this file's directory from sys.path - most likely to be
+# the previous interpreter's site-packages. Solves #705, #763, #779
+if os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+ for path in sys.path[:]:
+ if os.path.realpath(os.path.dirname(__file__)) == os.path.realpath(path):
+ sys.path.remove(path)
+
+import base64
+import codecs
+import optparse
+import re
+import shutil
+import logging
+import zlib
+import errno
+import glob
+import distutils.sysconfig
+import struct
+import subprocess
+import pkgutil
+import tempfile
+import textwrap
+from distutils.util import strtobool
+from os.path import join
+
+try:
+ import ConfigParser
+except ImportError:
+ import configparser as ConfigParser
+
+__version__ = "15.0.1"
+virtualenv_version = __version__ # legacy
+
+if sys.version_info < (2, 6):
+ print('ERROR: %s' % sys.exc_info()[1])
+ print('ERROR: this script requires Python 2.6 or greater.')
+ sys.exit(101)
+
+try:
+ basestring
+except NameError:
+ basestring = str
+
+py_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
+
+is_jython = sys.platform.startswith('java')
+is_pypy = hasattr(sys, 'pypy_version_info')
+is_win = (sys.platform == 'win32' and os.sep == '\\')
+is_cygwin = (sys.platform == 'cygwin')
+is_msys2 = (sys.platform == 'win32' and os.sep == '/')
+is_darwin = (sys.platform == 'darwin')
+abiflags = getattr(sys, 'abiflags', '')
+
+user_dir = os.path.expanduser('~')
+if is_win:
+ default_storage_dir = os.path.join(user_dir, 'virtualenv')
+else:
+ default_storage_dir = os.path.join(user_dir, '.virtualenv')
+default_config_file = os.path.join(default_storage_dir, 'virtualenv.ini')
+
+if is_pypy:
+ expected_exe = 'pypy'
+elif is_jython:
+ expected_exe = 'jython'
+else:
+ expected_exe = 'python'
+
+# Return a mapping of version -> Python executable
+# Only provided for Windows, where the information in the registry is used
+if not is_win:
+ def get_installed_pythons():
+ return {}
+else:
+ try:
+ import winreg
+ except ImportError:
+ import _winreg as winreg
+
+ def get_installed_pythons():
+ try:
+ python_core = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE,
+ "Software\\Python\\PythonCore")
+ except WindowsError:
+ # No registered Python installations
+ return {}
+ i = 0
+ versions = []
+ while True:
+ try:
+ versions.append(winreg.EnumKey(python_core, i))
+ i = i + 1
+ except WindowsError:
+ break
+ exes = dict()
+ for ver in versions:
+ try:
+ path = winreg.QueryValue(python_core, "%s\\InstallPath" % ver)
+ except WindowsError:
+ continue
+ exes[ver] = join(path, "python.exe")
+
+ winreg.CloseKey(python_core)
+
+ # Add the major versions
+ # Sort the keys, then repeatedly update the major version entry
+ # Last executable (i.e., highest version) wins with this approach
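+ # e.g. a mapping with keys '2.6', '2.7' and '3.5' also gains keys
+ # '2' and '3', each pointing at the highest such version found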
+ for ver in sorted(exes):
+ exes[ver[0]] = exes[ver]
+
+ return exes
+
+REQUIRED_MODULES = ['os', 'posix', 'posixpath', 'nt', 'ntpath', 'genericpath',
+ 'fnmatch', 'locale', 'encodings', 'codecs',
+ 'stat', 'UserDict', 'readline', 'copy_reg', 'types',
+ 're', 'sre', 'sre_parse', 'sre_constants', 'sre_compile',
+ 'zlib']
+
+REQUIRED_FILES = ['lib-dynload', 'config']
+
+majver, minver = sys.version_info[:2]
+if majver == 2:
+ if minver >= 6:
+ REQUIRED_MODULES.extend(['warnings', 'linecache', '_abcoll', 'abc'])
+ if minver >= 7:
+ REQUIRED_MODULES.extend(['_weakrefset'])
+ if is_msys2:
+ REQUIRED_MODULES.extend(['functools'])
+elif majver == 3:
+ # Some extra modules are needed for Python 3, but different ones
+ # for different versions.
+ REQUIRED_MODULES.extend([
+ '_abcoll', 'warnings', 'linecache', 'abc', 'io', '_weakrefset',
+ 'copyreg', 'tempfile', 'random', '__future__', 'collections',
+ 'keyword', 'tarfile', 'shutil', 'struct', 'copy', 'tokenize',
+ 'token', 'functools', 'heapq', 'bisect', 'weakref', 'reprlib'
+ ])
+ if minver >= 2:
+ REQUIRED_FILES[-1] = 'config-%s' % majver
+ if minver >= 3:
+ import sysconfig
+ platdir = sysconfig.get_config_var('PLATDIR')
+ REQUIRED_FILES.append(platdir)
+ REQUIRED_MODULES.extend([
+ 'base64', '_dummy_thread', 'hashlib', 'hmac',
+ 'imp', 'importlib', 'rlcompleter'
+ ])
+ if minver >= 4:
+ REQUIRED_MODULES.extend([
+ 'operator',
+ '_collections_abc',
+ '_bootlocale',
+ ])
+
+if is_pypy:
+ # these are needed to correctly display the exceptions that may happen
+ # during the bootstrap
+ REQUIRED_MODULES.extend(['traceback', 'linecache'])
+
+
+class Logger(object):
+
+ """
+ Logging object for use in command-line script. Allows ranges of
+ levels, to avoid some redundancy of displayed information.
+ """
+
+ DEBUG = logging.DEBUG
+ INFO = logging.INFO
+ NOTIFY = (logging.INFO+logging.WARN)/2
+ WARN = WARNING = logging.WARN
+ ERROR = logging.ERROR
+ FATAL = logging.FATAL
+
+ LEVELS = [DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
+
+ def __init__(self, consumers):
+ self.consumers = consumers
+ self.indent = 0
+ self.in_progress = None
+ self.in_progress_hanging = False
+
+ def debug(self, msg, *args, **kw):
+ self.log(self.DEBUG, msg, *args, **kw)
+
+ def info(self, msg, *args, **kw):
+ self.log(self.INFO, msg, *args, **kw)
+
+ def notify(self, msg, *args, **kw):
+ self.log(self.NOTIFY, msg, *args, **kw)
+
+ def warn(self, msg, *args, **kw):
+ self.log(self.WARN, msg, *args, **kw)
+
+ def error(self, msg, *args, **kw):
+ self.log(self.ERROR, msg, *args, **kw)
+
+ def fatal(self, msg, *args, **kw):
+ self.log(self.FATAL, msg, *args, **kw)
+
+ def log(self, level, msg, *args, **kw):
+ if args:
+ if kw:
+ raise TypeError(
+ "You may give positional or keyword arguments, not both")
+ args = args or kw
+ rendered = None
+ for consumer_level, consumer in self.consumers:
+ if self.level_matches(level, consumer_level):
+ if (self.in_progress_hanging
+ and consumer in (sys.stdout, sys.stderr)):
+ self.in_progress_hanging = False
+ sys.stdout.write('\n')
+ sys.stdout.flush()
+ if rendered is None:
+ if args:
+ rendered = msg % args
+ else:
+ rendered = msg
+ rendered = ' '*self.indent + rendered
+ if hasattr(consumer, 'write'):
+ consumer.write(rendered+'\n')
+ else:
+ consumer(rendered)
+
+ def start_progress(self, msg):
+ assert not self.in_progress, (
+ "Tried to start_progress(%r) while in_progress %r"
+ % (msg, self.in_progress))
+ if self.level_matches(self.NOTIFY, self._stdout_level()):
+ sys.stdout.write(msg)
+ sys.stdout.flush()
+ self.in_progress_hanging = True
+ else:
+ self.in_progress_hanging = False
+ self.in_progress = msg
+
+ def end_progress(self, msg='done.'):
+ assert self.in_progress, (
+ "Tried to end_progress without start_progress")
+ if self.stdout_level_matches(self.NOTIFY):
+ if not self.in_progress_hanging:
+ # Some message has been printed out since start_progress
+ sys.stdout.write('...' + self.in_progress + msg + '\n')
+ sys.stdout.flush()
+ else:
+ sys.stdout.write(msg + '\n')
+ sys.stdout.flush()
+ self.in_progress = None
+ self.in_progress_hanging = False
+
+ def show_progress(self):
+ """If we are in a progress scope, and no log messages have been
+ shown, write out another '.'"""
+ if self.in_progress_hanging:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+
+ def stdout_level_matches(self, level):
+ """Returns true if a message at this level will go to stdout"""
+ return self.level_matches(level, self._stdout_level())
+
+ def _stdout_level(self):
+ """Returns the level that stdout runs at"""
+ for level, consumer in self.consumers:
+ if consumer is sys.stdout:
+ return level
+ return self.FATAL
+
+ def level_matches(self, level, consumer_level):
+ """
+ >>> l = Logger([])
+ >>> l.level_matches(3, 4)
+ False
+ >>> l.level_matches(3, 2)
+ True
+ >>> l.level_matches(slice(None, 3), 3)
+ False
+ >>> l.level_matches(slice(None, 3), 2)
+ True
+ >>> l.level_matches(slice(1, 3), 1)
+ True
+ >>> l.level_matches(slice(2, 3), 1)
+ False
+ """
+ if isinstance(level, slice):
+ start, stop = level.start, level.stop
+ if start is not None and start > consumer_level:
+ return False
+ if stop is not None and stop <= consumer_level:
+ return False
+ return True
+ else:
+ return level >= consumer_level
+
+ #@classmethod
+ def level_for_integer(cls, level):
+ levels = cls.LEVELS
+ if level < 0:
+ return levels[0]
+ if level >= len(levels):
+ return levels[-1]
+ return levels[level]
+
+ level_for_integer = classmethod(level_for_integer)
+
+# Create a silent logger just to prevent `logger` from being undefined;
+# it will be overridden with the requested verbosity when main() is called.
+logger = Logger([(Logger.LEVELS[-1], sys.stdout)])
+
+def mkdir(path):
+ if not os.path.exists(path):
+ logger.info('Creating %s', path)
+ os.makedirs(path)
+ else:
+ logger.info('Directory %s already exists', path)
+
+def copyfileordir(src, dest, symlink=True):
+ if os.path.isdir(src):
+ shutil.copytree(src, dest, symlink)
+ else:
+ shutil.copy2(src, dest)
+
+def copyfile(src, dest, symlink=True):
+ if not os.path.exists(src):
+ # Some bad symlink in the src
+ logger.warn('Cannot find file %s (bad symlink)', src)
+ return
+ if os.path.exists(dest):
+ logger.debug('File %s already exists', dest)
+ return
+ if not os.path.exists(os.path.dirname(dest)):
+ logger.info('Creating parent directories for %s', os.path.dirname(dest))
+ os.makedirs(os.path.dirname(dest))
+ if not os.path.islink(src):
+ srcpath = os.path.abspath(src)
+ else:
+ srcpath = os.readlink(src)
+ if symlink and hasattr(os, 'symlink') and not is_win:
+ logger.info('Symlinking %s', dest)
+ try:
+ os.symlink(srcpath, dest)
+ except (OSError, NotImplementedError):
+ logger.info('Symlinking failed, copying to %s', dest)
+ copyfileordir(src, dest, symlink)
+ else:
+ logger.info('Copying to %s', dest)
+ copyfileordir(src, dest, symlink)
+
+def writefile(dest, content, overwrite=True):
+ if not os.path.exists(dest):
+ logger.info('Writing %s', dest)
+ with open(dest, 'wb') as f:
+ f.write(content.encode('utf-8'))
+ return
+ else:
+ with open(dest, 'rb') as f:
+ c = f.read()
+ if c != content.encode("utf-8"):
+ if not overwrite:
+ logger.notify('File %s exists with different content; not overwriting', dest)
+ return
+ logger.notify('Overwriting %s with new content', dest)
+ with open(dest, 'wb') as f:
+ f.write(content.encode('utf-8'))
+ else:
+ logger.info('Content %s already in place', dest)
+
+def rmtree(dir):
+ if os.path.exists(dir):
+ logger.notify('Deleting tree %s', dir)
+ shutil.rmtree(dir)
+ else:
+ logger.info('Do not need to delete %s; already gone', dir)
+
+def make_exe(fn):
+ if hasattr(os, 'chmod'):
+ oldmode = os.stat(fn).st_mode & 0xFFF # 0o7777
+ newmode = (oldmode | 0x16D) & 0xFFF # 0o555, 0o7777
+ os.chmod(fn, newmode)
+ logger.info('Changed mode of %s to %s', fn, oct(newmode))
+
+def _find_file(filename, dirs):
+ for dir in reversed(dirs):
+ files = glob.glob(os.path.join(dir, filename))
+ if files and os.path.isfile(files[0]):
+ return True, files[0]
+ return False, filename
+
+def file_search_dirs():
+ here = os.path.dirname(os.path.abspath(__file__))
+ dirs = [here, join(here, 'virtualenv_support')]
+ if os.path.splitext(os.path.dirname(__file__))[0] != 'virtualenv':
+ # Probably some boot script; just in case virtualenv is installed...
+ try:
+ import virtualenv
+ except ImportError:
+ pass
+ else:
+ dirs.append(os.path.join(
+ os.path.dirname(virtualenv.__file__), 'virtualenv_support'))
+ return [d for d in dirs if os.path.isdir(d)]
+
+
+class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
+ """
+ Custom help formatter for use in ConfigOptionParser that updates
+ the defaults before expanding them, allowing them to show up correctly
+ in the help listing
+ """
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser.update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class ConfigOptionParser(optparse.OptionParser):
+ """
+ Custom option parser which updates its defaults by checking the
+ configuration files and environmental variables
+ """
+ def __init__(self, *args, **kwargs):
+ self.config = ConfigParser.RawConfigParser()
+ self.files = self.get_config_files()
+ self.config.read(self.files)
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def get_config_files(self):
+ config_file = os.environ.get('VIRTUALENV_CONFIG_FILE', False)
+ if config_file and os.path.exists(config_file):
+ return [config_file]
+ return [default_config_file]
+
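+ # Illustrative (hypothetical settings): a config file containing
+ #     [virtualenv]
+ #     system-site-packages = true
+ # or VIRTUALENV_SYSTEM_SITE_PACKAGES=true in the environment are both
+ # picked up by update_defaults() below and flip the option's default.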
+ def update_defaults(self, defaults):
+ """
+ Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists).
+ """
+ # Then go and look for the other sources of configuration:
+ config = {}
+ # 1. config files
+ config.update(dict(self.get_config_section('virtualenv')))
+ # 2. environmental variables
+ config.update(dict(self.get_environ_vars()))
+ # Then set the options with those values
+ for key, val in config.items():
+ key = key.replace('_', '-')
+ if not key.startswith('--'):
+ key = '--%s' % key # only prefer long opts
+ option = self.get_option(key)
+ if option is not None:
+ # ignore empty values
+ if not val:
+ continue
+ # handle multiline configs
+ if option.action == 'append':
+ val = val.split()
+ else:
+ option.nargs = 1
+ if option.action == 'store_false':
+ val = not strtobool(val)
+ elif option.action in ('store_true', 'count'):
+ val = strtobool(val)
+ try:
+ val = option.convert_value(key, val)
+ except optparse.OptionValueError:
+ e = sys.exc_info()[1]
+ print("An error occurred during configuration: %s" % e)
+ sys.exit(3)
+ defaults[option.dest] = val
+ return defaults
+
+ def get_config_section(self, name):
+ """
+ Get a section of a configuration
+ """
+ if self.config.has_section(name):
+ return self.config.items(name)
+ return []
+
+ def get_environ_vars(self, prefix='VIRTUALENV_'):
+ """
+ Returns a generator of all environment variables whose names start with the given prefix (default 'VIRTUALENV_')
+ """
+ for key, val in os.environ.items():
+ if key.startswith(prefix):
+ yield (key.replace(prefix, '').lower(), val)
+
+ def get_default_values(self):
+ """
+ Overriding to make updating the defaults after instantiation of
+ the option parser possible; update_defaults() does the dirty work.
+ """
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+ defaults = self.update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, basestring):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+
+def main():
+ parser = ConfigOptionParser(
+ version=virtualenv_version,
+ usage="%prog [OPTIONS] DEST_DIR",
+ formatter=UpdatingDefaultsHelpFormatter())
+
+ parser.add_option(
+ '-v', '--verbose',
+ action='count',
+ dest='verbose',
+ default=0,
+ help="Increase verbosity.")
+
+ parser.add_option(
+ '-q', '--quiet',
+ action='count',
+ dest='quiet',
+ default=0,
+ help='Decrease verbosity.')
+
+ parser.add_option(
+ '-p', '--python',
+ dest='python',
+ metavar='PYTHON_EXE',
+ help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 '
+ 'interpreter to create the new environment. The default is the interpreter that '
+ 'virtualenv was installed with (%s)' % sys.executable)
+
+ parser.add_option(
+ '--clear',
+ dest='clear',
+ action='store_true',
+ help="Clear out the non-root install and start from scratch.")
+
+ parser.set_defaults(system_site_packages=False)
+ parser.add_option(
+ '--no-site-packages',
+ dest='system_site_packages',
+ action='store_false',
+ help="DEPRECATED. Retained only for backward compatibility. "
+ "Not having access to global site-packages is now the default behavior.")
+
+ parser.add_option(
+ '--system-site-packages',
+ dest='system_site_packages',
+ action='store_true',
+ help="Give the virtual environment access to the global site-packages.")
+
+ parser.add_option(
+ '--always-copy',
+ dest='symlink',
+ action='store_false',
+ default=True,
+ help="Always copy files rather than symlinking.")
+
+ parser.add_option(
+ '--unzip-setuptools',
+ dest='unzip_setuptools',
+ action='store_true',
+ help="Unzip Setuptools when installing it.")
+
+ parser.add_option(
+ '--relocatable',
+ dest='relocatable',
+ action='store_true',
+ help='Make an EXISTING virtualenv environment relocatable. '
+ 'This fixes up scripts and makes all .pth files relative.')
+
+ parser.add_option(
+ '--no-setuptools',
+ dest='no_setuptools',
+ action='store_true',
+ help='Do not install setuptools in the new virtualenv.')
+
+ parser.add_option(
+ '--no-pip',
+ dest='no_pip',
+ action='store_true',
+ help='Do not install pip in the new virtualenv.')
+
+ parser.add_option(
+ '--no-wheel',
+ dest='no_wheel',
+ action='store_true',
+ help='Do not install wheel in the new virtualenv.')
+
+ default_search_dirs = file_search_dirs()
+ parser.add_option(
+ '--extra-search-dir',
+ dest="search_dirs",
+ action="append",
+ metavar='DIR',
+ default=default_search_dirs,
+ help="Directory to look for setuptools/pip distributions in. "
+ "This option can be used multiple times.")
+
+ parser.add_option(
+ "--download",
+ dest="download",
+ default=True,
+ action="store_true",
+ help="Download preinstalled packages from PyPI.",
+ )
+
+ parser.add_option(
+ "--no-download",
+ '--never-download',
+ dest="download",
+ action="store_false",
+ help="Do not download preinstalled packages from PyPI.",
+ )
+
+ parser.add_option(
+ '--prompt',
+ dest='prompt',
+ help='Provides an alternative prompt prefix for this environment.')
+
+ parser.add_option(
+ '--setuptools',
+ dest='setuptools',
+ action='store_true',
+ help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+ parser.add_option(
+ '--distribute',
+ dest='distribute',
+ action='store_true',
+ help="DEPRECATED. Retained only for backward compatibility. This option has no effect.")
+
+ if 'extend_parser' in globals():
+ extend_parser(parser)
+
+ options, args = parser.parse_args()
+
+ global logger
+
+ if 'adjust_options' in globals():
+ adjust_options(options, args)
+
+ verbosity = options.verbose - options.quiet
+ logger = Logger([(Logger.level_for_integer(2 - verbosity), sys.stdout)])
+
+ if options.python and not os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
+ env = os.environ.copy()
+ interpreter = resolve_interpreter(options.python)
+ if interpreter == sys.executable:
+ logger.warn('Already using interpreter %s' % interpreter)
+ else:
+ logger.notify('Running virtualenv with interpreter %s' % interpreter)
+ env['VIRTUALENV_INTERPRETER_RUNNING'] = 'true'
+ file = __file__
+ if file.endswith('.pyc'):
+ file = file[:-1]
+ popen = subprocess.Popen([interpreter, file] + sys.argv[1:], env=env)
+ raise SystemExit(popen.wait())
+
+ if not args:
+ print('You must provide a DEST_DIR')
+ parser.print_help()
+ sys.exit(2)
+ if len(args) > 1:
+ print('There must be only one argument: DEST_DIR (you gave %s)' % (
+ ' '.join(args)))
+ parser.print_help()
+ sys.exit(2)
+
+ home_dir = args[0]
+
+ if os.path.exists(home_dir) and os.path.isfile(home_dir):
+ logger.fatal('ERROR: File already exists and is not a directory.')
+ logger.fatal('Please provide a different path or delete the file.')
+ sys.exit(3)
+
+ if os.environ.get('WORKING_ENV'):
+ logger.fatal('ERROR: you cannot run virtualenv while in a workingenv')
+ logger.fatal('Please deactivate your workingenv, then re-run this script')
+ sys.exit(3)
+
+ if 'PYTHONHOME' in os.environ:
+ logger.warn('PYTHONHOME is set. You *must* activate the virtualenv before using it')
+ del os.environ['PYTHONHOME']
+
+ if options.relocatable:
+ make_environment_relocatable(home_dir)
+ return
+
+ create_environment(home_dir,
+ site_packages=options.system_site_packages,
+ clear=options.clear,
+ unzip_setuptools=options.unzip_setuptools,
+ prompt=options.prompt,
+ search_dirs=options.search_dirs,
+ download=options.download,
+ no_setuptools=options.no_setuptools,
+ no_pip=options.no_pip,
+ no_wheel=options.no_wheel,
+ symlink=options.symlink and hasattr(os, 'symlink')) # MOZ: Make sure we don't use symlink when we don't have it
+ if 'after_install' in globals():
+ after_install(options, home_dir)
+
+def call_subprocess(cmd, show_stdout=True,
+ filter_stdout=None, cwd=None,
+ raise_on_returncode=True, extra_env=None,
+ remove_from_env=None, stdin=None):
+ cmd_parts = []
+ for part in cmd:
+ if len(part) > 45:
+ part = part[:20]+"..."+part[-20:]
+ if ' ' in part or '\n' in part or '"' in part or "'" in part:
+ part = '"%s"' % part.replace('"', '\\"')
+ if hasattr(part, 'decode'):
+ try:
+ part = part.decode(sys.getdefaultencoding())
+ except UnicodeDecodeError:
+ part = part.decode(sys.getfilesystemencoding())
+ cmd_parts.append(part)
+ cmd_desc = ' '.join(cmd_parts)
+ if show_stdout:
+ stdout = None
+ else:
+ stdout = subprocess.PIPE
+ logger.debug("Running command %s" % cmd_desc)
+ if extra_env or remove_from_env:
+ env = os.environ.copy()
+ if extra_env:
+ env.update(extra_env)
+ if remove_from_env:
+ for varname in remove_from_env:
+ env.pop(varname, None)
+ else:
+ env = None
+ try:
+ proc = subprocess.Popen(
+ cmd, stderr=subprocess.STDOUT,
+ stdin=None if stdin is None else subprocess.PIPE,
+ stdout=stdout,
+ cwd=cwd, env=env)
+ except Exception:
+ e = sys.exc_info()[1]
+ logger.fatal(
+ "Error %s while executing command %s" % (e, cmd_desc))
+ raise
+ all_output = []
+ if stdout is not None:
+ if stdin is not None:
+ proc.stdin.write(stdin)
+ proc.stdin.close()
+
+ stdout = proc.stdout
+ encoding = sys.getdefaultencoding()
+ fs_encoding = sys.getfilesystemencoding()
+ while True:
+ line = stdout.readline()
+ try:
+ line = line.decode(encoding)
+ except UnicodeDecodeError:
+ line = line.decode(fs_encoding)
+ if not line:
+ break
+ line = line.rstrip()
+ all_output.append(line)
+ if filter_stdout:
+ level = filter_stdout(line)
+ if isinstance(level, tuple):
+ level, line = level
+ logger.log(level, line)
+ if not logger.stdout_level_matches(level):
+ logger.show_progress()
+ else:
+ logger.info(line)
+ else:
+ proc.communicate(stdin)
+ proc.wait()
+ if proc.returncode:
+ if raise_on_returncode:
+ if all_output:
+ logger.notify('Complete output from command %s:' % cmd_desc)
+ logger.notify('\n'.join(all_output) + '\n----------------------------------------')
+ raise OSError(
+ "Command %s failed with error code %s"
+ % (cmd_desc, proc.returncode))
+ else:
+ logger.warn(
+ "Command %s had error code %s"
+ % (cmd_desc, proc.returncode))
+
+def filter_install_output(line):
+ if line.strip().startswith('running'):
+ return Logger.INFO
+ return Logger.DEBUG
+
+def find_wheels(projects, search_dirs):
+ """Find wheels from which we can import PROJECTS.
+
+ Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return
+ a list of the first wheel found for each PROJECT.
+ """
+
+ wheels = []
+
+ # Look through SEARCH_DIRS for the first suitable wheel. Don't bother
+ # about version checking here, as this is simply to get something we can
+ # then use to install the correct version.
+ for project in projects:
+ for dirname in search_dirs:
+ # This relies on only having "universal" wheels available.
+ # The pattern could be tightened to require -py2.py3-none-any.whl.
+ files = glob.glob(os.path.join(dirname, project + '-*.whl'))
+ if files:
+ wheels.append(os.path.abspath(files[0]))
+ break
+ else:
+ # We're out of luck, so quit with a suitable error
+ logger.fatal('Cannot find a wheel for %s' % (project,))
+
+ return wheels
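+ # For example (hypothetical paths), find_wheels(['pip'], ['/tmp/wheels'])
+ # would return ['/tmp/wheels/pip-8.1.2-py2.py3-none-any.whl'] if such a
+ # wheel is present in that directory.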
+
+def install_wheel(project_names, py_executable, search_dirs=None,
+ download=False):
+ if search_dirs is None:
+ search_dirs = file_search_dirs()
+
+ wheels = find_wheels(['setuptools', 'pip'], search_dirs)
+ pythonpath = os.pathsep.join(wheels)
+
+ # PIP_FIND_LINKS uses space as the path separator and thus cannot have paths
+ # with spaces in them. Convert any of those to local file:// URL form.
+ try:
+ from urlparse import urljoin
+ from urllib import pathname2url
+ except ImportError:
+ from urllib.parse import urljoin
+ from urllib.request import pathname2url
+ def space_path2url(p):
+ if ' ' not in p:
+ return p
+ return urljoin('file:', pathname2url(os.path.abspath(p)))
+ findlinks = ' '.join(space_path2url(d) for d in search_dirs)
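+ # For example, space_path2url('/wheel dir') yields roughly
+ # 'file:///wheel%20dir', while a path without spaces passes through as-is.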
+
+ SCRIPT = textwrap.dedent("""
+ import sys
+ import pkgutil
+ import tempfile
+ import os
+
+ import pip
+
+ cert_data = pkgutil.get_data("pip._vendor.requests", "cacert.pem")
+ if cert_data is not None:
+ cert_file = tempfile.NamedTemporaryFile(delete=False)
+ cert_file.write(cert_data)
+ cert_file.close()
+ else:
+ cert_file = None
+
+ try:
+ args = ["install", "--ignore-installed"]
+ if cert_file is not None:
+ args += ["--cert", cert_file.name]
+ args += sys.argv[1:]
+
+ sys.exit(pip.main(args))
+ finally:
+ if cert_file is not None:
+ os.remove(cert_file.name)
+ """).encode("utf8")
+
+ cmd = [py_executable, '-'] + project_names
+ logger.start_progress('Installing %s...' % (', '.join(project_names)))
+ logger.indent += 2
+
+ env = {
+ "PYTHONPATH": pythonpath,
+ "JYTHONPATH": pythonpath, # for Jython < 3.x
+ "PIP_FIND_LINKS": findlinks,
+ "PIP_USE_WHEEL": "1",
+ "PIP_ONLY_BINARY": ":all:",
+ "PIP_PRE": "1",
+ "PIP_USER": "0",
+ }
+
+ if not download:
+ env["PIP_NO_INDEX"] = "1"
+
+ try:
+ call_subprocess(cmd, show_stdout=False, extra_env=env, stdin=SCRIPT)
+ finally:
+ logger.indent -= 2
+ logger.end_progress()
+
+
+def create_environment(home_dir, site_packages=False, clear=False,
+ unzip_setuptools=False,
+ prompt=None, search_dirs=None, download=False,
+ no_setuptools=False, no_pip=False, no_wheel=False,
+ symlink=True):
+ """
+ Creates a new environment in ``home_dir``.
+
+ If ``site_packages`` is true, then the global ``site-packages/``
+ directory will be on the path.
+
+ If ``clear`` is true (default False) then the environment will
+ first be cleared.
+ """
+ home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+
+ py_executable = os.path.abspath(install_python(
+ home_dir, lib_dir, inc_dir, bin_dir,
+ site_packages=site_packages, clear=clear, symlink=symlink))
+
+ install_distutils(home_dir)
+
+ to_install = []
+
+ if not no_setuptools:
+ to_install.append('setuptools')
+
+ if not no_pip:
+ to_install.append('pip')
+
+ if not no_wheel:
+ to_install.append('wheel')
+
+ if to_install:
+ install_wheel(
+ to_install,
+ py_executable,
+ search_dirs,
+ download=download,
+ )
+
+ install_activate(home_dir, bin_dir, prompt)
+
+ install_python_config(home_dir, bin_dir, prompt)
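+ # Typical programmatic use (hypothetical path):
+ # create_environment('/tmp/venv', site_packages=False, download=True)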
+
+def is_executable_file(fpath):
+ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+def path_locations(home_dir):
+ """Return the path locations for the environment (where libraries are,
+ where scripts go, etc)"""
+ home_dir = os.path.abspath(home_dir)
+ # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its
+ # prefix arg is broken: http://bugs.python.org/issue3386
+ if is_win:
+ # Windows has lots of problems with executables that have spaces in
+ # the name; convert the path to its short (8.3, "~1") form to get
+ # rid of them:
+ mkdir(home_dir)
+ if ' ' in home_dir:
+ import ctypes
+ GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
+ size = max(len(home_dir)+1, 256)
+ buf = ctypes.create_unicode_buffer(size)
+ try:
+ u = unicode
+ except NameError:
+ u = str
+ ret = GetShortPathName(u(home_dir), buf, size)
+ if not ret:
+ print('Error: the path "%s" has a space in it' % home_dir)
+ print('We could not determine the short pathname for it.')
+ print('Exiting.')
+ sys.exit(3)
+ home_dir = str(buf.value)
+ lib_dir = join(home_dir, 'Lib')
+ inc_dir = join(home_dir, 'Include')
+ bin_dir = join(home_dir, 'Scripts')
+ if is_jython:
+ lib_dir = join(home_dir, 'Lib')
+ inc_dir = join(home_dir, 'Include')
+ bin_dir = join(home_dir, 'bin')
+ elif is_pypy:
+ lib_dir = home_dir
+ inc_dir = join(home_dir, 'include')
+ bin_dir = join(home_dir, 'bin')
+ elif not is_win:
+ lib_dir = join(home_dir, 'lib', py_version)
+ inc_dir = join(home_dir, 'include', py_version + abiflags)
+ bin_dir = join(home_dir, 'bin')
+ return home_dir, lib_dir, inc_dir, bin_dir
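+ # For CPython 2.7 on Linux, for example, path_locations('/tmp/venv')
+ # would return ('/tmp/venv', '/tmp/venv/lib/python2.7',
+ # '/tmp/venv/include/python2.7', '/tmp/venv/bin').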
+
+
+def change_prefix(filename, dst_prefix):
+ prefixes = [sys.prefix]
+
+ if is_darwin:
+ prefixes.extend((
+ os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+ os.path.join(sys.prefix, "Extras", "lib", "python"),
+ os.path.join("~", "Library", "Python", sys.version[:3], "site-packages"),
+ # Python 2.6 no-frameworks
+ os.path.join("~", ".local", "lib","python", sys.version[:3], "site-packages"),
+ # System Python 2.7 on OSX Mountain Lion
+ os.path.join("~", "Library", "Python", sys.version[:3], "lib", "python", "site-packages")))
+
+ if hasattr(sys, 'real_prefix'):
+ prefixes.append(sys.real_prefix)
+ if hasattr(sys, 'base_prefix'):
+ prefixes.append(sys.base_prefix)
+ prefixes = list(map(os.path.expanduser, prefixes))
+ prefixes = list(map(os.path.abspath, prefixes))
+ # Check longer prefixes first so we don't split in the middle of a filename
+ prefixes = sorted(prefixes, key=len, reverse=True)
+ filename = os.path.abspath(filename)
+ # On Windows, make sure drive letter is uppercase
+ if is_win and filename[0] in 'abcdefghijklmnopqrstuvwxyz':
+ filename = filename[0].upper() + filename[1:]
+ for i, prefix in enumerate(prefixes):
+ if is_win and prefix[0] in 'abcdefghijklmnopqrstuvwxyz':
+ prefixes[i] = prefix[0].upper() + prefix[1:]
+ for src_prefix in prefixes:
+ if filename.startswith(src_prefix):
+ _, relpath = filename.split(src_prefix, 1)
+ if src_prefix != os.sep: # sys.prefix == "/"
+ assert relpath[0] == os.sep
+ relpath = relpath[1:]
+ return join(dst_prefix, relpath)
+ assert False, "Filename %s does not start with any of these prefixes: %s" % \
+ (filename, prefixes)
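+ # For example, assuming sys.prefix is '/usr',
+ # change_prefix('/usr/lib/python2.7/os.py', '/tmp/venv') returns
+ # '/tmp/venv/lib/python2.7/os.py'.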
+
+def copy_required_modules(dst_prefix, symlink):
+ import imp
+
+ for modname in REQUIRED_MODULES:
+ if modname in sys.builtin_module_names:
+ logger.info("Ignoring built-in bootstrap module: %s" % modname)
+ continue
+ try:
+ f, filename, _ = imp.find_module(modname)
+ except ImportError:
+ logger.info("Cannot import bootstrap module: %s" % modname)
+ else:
+ if f is not None:
+ f.close()
+ # special-case custom readline.so on OS X, but not for pypy:
+ if modname == 'readline' and sys.platform == 'darwin' and not (
+ is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))):
+ dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so')
+ elif modname == 'readline' and sys.platform == 'win32':
+ # special-case for Windows, where readline is not a
+ # standard module, though it may have been installed in
+ # site-packages by a third-party package
+ continue  # skip: dst_filename is never set in this case
+ else:
+ dst_filename = change_prefix(filename, dst_prefix)
+ copyfile(filename, dst_filename, symlink)
+ if filename.endswith('.pyc'):
+ pyfile = filename[:-1]
+ if os.path.exists(pyfile):
+ copyfile(pyfile, dst_filename[:-1], symlink)
+
+
+def subst_path(prefix_path, prefix, home_dir):
+ prefix_path = os.path.normpath(prefix_path)
+ prefix = os.path.normpath(prefix)
+ home_dir = os.path.normpath(home_dir)
+ if not prefix_path.startswith(prefix):
+ logger.warn('Path not in prefix %r %r', prefix_path, prefix)
+ return
+ return prefix_path.replace(prefix, home_dir, 1)
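+ # For example, subst_path('/usr/include/python2.7', '/usr', '/tmp/venv')
+ # returns '/tmp/venv/include/python2.7'.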
+
+
+def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True):
+ """Install just the base environment, no distutils patches etc"""
+ if sys.executable.startswith(bin_dir):
+ print('Please use the *system* python to run this script')
+ return
+
+ if clear:
+ rmtree(lib_dir)
+ ## FIXME: why not delete it?
+ ## Maybe it should delete everything with #!/path/to/venv/python in it
+ logger.notify('Not deleting %s', bin_dir)
+
+ if hasattr(sys, 'real_prefix'):
+ logger.notify('Using real prefix %r' % sys.real_prefix)
+ prefix = sys.real_prefix
+ elif hasattr(sys, 'base_prefix'):
+ logger.notify('Using base prefix %r' % sys.base_prefix)
+ prefix = sys.base_prefix
+ else:
+ prefix = sys.prefix
+ mkdir(lib_dir)
+ fix_lib64(lib_dir, symlink)
+ stdlib_dirs = [os.path.dirname(os.__file__)]
+ if is_win:
+ stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs'))
+ elif is_darwin:
+ stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages'))
+ if hasattr(os, 'symlink'):
+ logger.info('Symlinking Python bootstrap modules')
+ else:
+ logger.info('Copying Python bootstrap modules')
+ logger.indent += 2
+ try:
+ # copy required files...
+ for stdlib_dir in stdlib_dirs:
+ if not os.path.isdir(stdlib_dir):
+ continue
+ for fn in os.listdir(stdlib_dir):
+ bn = os.path.splitext(fn)[0]
+ if fn != 'site-packages' and bn in REQUIRED_FILES:
+ copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink)
+ # ...and modules
+ copy_required_modules(home_dir, symlink)
+ finally:
+ logger.indent -= 2
+ mkdir(join(lib_dir, 'site-packages'))
+ import site
+ site_filename = site.__file__
+ if site_filename.endswith('.pyc') or site_filename.endswith('.pyo'):
+ site_filename = site_filename[:-1]
+ elif site_filename.endswith('$py.class'):
+ site_filename = site_filename.replace('$py.class', '.py')
+ site_filename_dst = change_prefix(site_filename, home_dir)
+ site_dir = os.path.dirname(site_filename_dst)
+ # MOZ: Copy a site.py if one exists next to this file instead of using the
+ # base64-encoded one embedded below. Necessary for some site.py fixes for
+ # the MinGW64 version of python.
+ site_py_src_path = os.path.join(os.path.dirname(__file__), 'site.py')
+ if os.path.isfile(site_py_src_path):
+ shutil.copy(site_py_src_path, site_filename_dst)
+ else:
+ writefile(site_filename_dst, SITE_PY)
+ writefile(join(site_dir, 'orig-prefix.txt'), prefix)
+ site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
+ if not site_packages:
+ writefile(site_packages_filename, '')
+
+ if is_pypy or is_win:
+ stdinc_dir = join(prefix, 'include')
+ else:
+ stdinc_dir = join(prefix, 'include', py_version + abiflags)
+ if os.path.exists(stdinc_dir):
+ copyfile(stdinc_dir, inc_dir, symlink)
+ else:
+ logger.debug('No include dir %s' % stdinc_dir)
+
+ platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
+ if platinc_dir != stdinc_dir:
+ platinc_dest = distutils.sysconfig.get_python_inc(
+ plat_specific=1, prefix=home_dir)
+ if platinc_dir == platinc_dest:
+ # Do platinc_dest manually due to a CPython bug;
+ # not http://bugs.python.org/issue3386 but a close cousin
+ platinc_dest = subst_path(platinc_dir, prefix, home_dir)
+ if platinc_dest:
+ # PyPy's stdinc_dir and prefix are relative to the original binary
+ # (traversing virtualenvs), whereas the platinc_dir is relative to
+ # the inner virtualenv and ignores the prefix argument.
+ # This seems more evolved than designed.
+ copyfile(platinc_dir, platinc_dest, symlink)
+
+ # pypy never uses exec_prefix, just ignore it
+ if sys.exec_prefix != prefix and not is_pypy:
+ if is_win:
+ exec_dir = join(sys.exec_prefix, 'lib')
+ elif is_jython:
+ exec_dir = join(sys.exec_prefix, 'Lib')
+ else:
+ exec_dir = join(sys.exec_prefix, 'lib', py_version)
+ for fn in os.listdir(exec_dir):
+ copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink)
+
+ if is_jython:
+ # Jython has either jython-dev.jar and javalib/ dir, or just
+ # jython.jar
+ for name in 'jython-dev.jar', 'javalib', 'jython.jar':
+ src = join(prefix, name)
+ if os.path.exists(src):
+ copyfile(src, join(home_dir, name), symlink)
+ # XXX: registry should always exist after Jython 2.5rc1
+ src = join(prefix, 'registry')
+ if os.path.exists(src):
+ copyfile(src, join(home_dir, 'registry'), symlink=False)
+ copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'),
+ symlink=False)
+
+ mkdir(bin_dir)
+ py_executable = join(bin_dir, os.path.basename(sys.executable))
+ if 'Python.framework' in prefix:
+ # OS X framework builds cause validation to break
+ # https://github.com/pypa/virtualenv/issues/322
+ if os.environ.get('__PYVENV_LAUNCHER__'):
+ del os.environ["__PYVENV_LAUNCHER__"]
+ if re.search(r'/Python(?:-32|-64)*$', py_executable):
+ # The name of the python executable is not quite what
+ # we want, rename it.
+ py_executable = os.path.join(
+ os.path.dirname(py_executable), 'python')
+
+ logger.notify('New %s executable in %s', expected_exe, py_executable)
+ pcbuild_dir = os.path.dirname(sys.executable)
+ pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
+ if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
+ logger.notify('Detected python running from build directory %s', pcbuild_dir)
+ logger.notify('Writing .pth file linking to build directory for *.pyd files')
+ writefile(pyd_pth, pcbuild_dir)
+ else:
+ pcbuild_dir = None
+ if os.path.exists(pyd_pth):
+ logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
+ os.unlink(pyd_pth)
+
+ if sys.executable != py_executable:
+ ## FIXME: could I just hard link?
+ executable = sys.executable
+ shutil.copyfile(executable, py_executable)
+ make_exe(py_executable)
+ if is_win or is_cygwin:
+ pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
+ if os.path.exists(pythonw):
+ logger.info('Also created pythonw.exe')
+ shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe'))
+ python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
+ python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
+ if os.path.exists(python_d):
+ logger.info('Also created python_d.exe')
+ shutil.copyfile(python_d, python_d_dest)
+ elif os.path.exists(python_d_dest):
+ logger.info('Removed python_d.exe as it is no longer at the source')
+ os.unlink(python_d_dest)
+ # We need to copy the DLL to ensure that Windows loads the correct one;
+ # it may not exist if we are on Cygwin.
+ py_executable_dll = 'python%s%s.dll' % (
+ sys.version_info[0], sys.version_info[1])
+ py_executable_dll_d = 'python%s%s_d.dll' % (
+ sys.version_info[0], sys.version_info[1])
+ pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
+ pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
+ pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
+ if os.path.exists(pythondll):
+ logger.info('Also created %s' % py_executable_dll)
+ shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll))
+ if os.path.exists(pythondll_d):
+ logger.info('Also created %s' % py_executable_dll_d)
+ shutil.copyfile(pythondll_d, pythondll_d_dest)
+ elif os.path.exists(pythondll_d_dest):
+ logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
+ os.unlink(pythondll_d_dest)
+ if is_pypy:
+ # make a symlink python --> pypy-c
+ python_executable = os.path.join(os.path.dirname(py_executable), 'python')
+ if sys.platform in ('win32', 'cygwin'):
+ python_executable += '.exe'
+ logger.info('Also created executable %s' % python_executable)
+ copyfile(py_executable, python_executable, symlink)
+
+ if is_win:
+ for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll',
+ 'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll',
+ 'tcl85.dll', 'tk85.dll']:
+ src = join(prefix, name)
+ if os.path.exists(src):
+ copyfile(src, join(bin_dir, name), symlink)
+
+ for d in sys.path:
+ if d.endswith('lib_pypy'):
+ break
+ else:
+ logger.fatal('Could not find lib_pypy in sys.path')
+ raise SystemExit(3)
+ logger.info('Copying lib_pypy')
+ copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink)
+
+ if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
+ secondary_exe = os.path.join(os.path.dirname(py_executable),
+ expected_exe)
+ py_executable_ext = os.path.splitext(py_executable)[1]
+ if py_executable_ext.lower() == '.exe':
+ # python2.4 gives an extension of '.4' :P
+ secondary_exe += py_executable_ext
+ if os.path.exists(secondary_exe):
+ logger.warn('Not overwriting existing %s script %s (you must use %s)'
+ % (expected_exe, secondary_exe, py_executable))
+ else:
+ logger.notify('Also creating executable in %s' % secondary_exe)
+ shutil.copyfile(sys.executable, secondary_exe)
+ make_exe(secondary_exe)
+
+ if '.framework' in prefix:
+ if 'Python.framework' in prefix:
+ logger.debug('MacOSX Python framework detected')
+ # Make sure we use the embedded interpreter inside
+ # the framework, even if sys.executable points to
+ # the stub executable in ${sys.prefix}/bin
+ # See http://groups.google.com/group/python-virtualenv/
+ # browse_thread/thread/17cab2f85da75951
+ original_python = os.path.join(
+ prefix, 'Resources/Python.app/Contents/MacOS/Python')
+ if 'EPD' in prefix:
+ logger.debug('EPD framework detected')
+ original_python = os.path.join(prefix, 'bin/python')
+ shutil.copy(original_python, py_executable)
+
+ # Copy the framework's dylib into the virtual
+ # environment
+ virtual_lib = os.path.join(home_dir, '.Python')
+
+ if os.path.exists(virtual_lib):
+ os.unlink(virtual_lib)
+ copyfile(
+ os.path.join(prefix, 'Python'),
+ virtual_lib,
+ symlink)
+
+ # And then change the install_name of the copied python executable
+ try:
+ mach_o_change(py_executable,
+ os.path.join(prefix, 'Python'),
+ '@executable_path/../.Python')
+ except:
+ e = sys.exc_info()[1]
+ logger.warn("Could not call mach_o_change: %s. "
+ "Trying to call install_name_tool instead." % e)
+ try:
+ call_subprocess(
+ ["install_name_tool", "-change",
+ os.path.join(prefix, 'Python'),
+ '@executable_path/../.Python',
+ py_executable])
+ except:
+ logger.fatal("Could not call install_name_tool -- you must "
+ "have Apple's development tools installed")
+ raise
+
+ if not is_win:
+ # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
+ py_exe_version_major = 'python%s' % sys.version_info[0]
+ py_exe_version_major_minor = 'python%s.%s' % (
+ sys.version_info[0], sys.version_info[1])
+ py_exe_no_version = 'python'
+ required_symlinks = [ py_exe_no_version, py_exe_version_major,
+ py_exe_version_major_minor ]
+
+ py_executable_base = os.path.basename(py_executable)
+
+ if py_executable_base in required_symlinks:
+ # Don't try to symlink to yourself.
+ required_symlinks.remove(py_executable_base)
+
+ for pth in required_symlinks:
+ full_pth = join(bin_dir, pth)
+ if os.path.exists(full_pth):
+ os.unlink(full_pth)
+ if symlink:
+ os.symlink(py_executable_base, full_pth)
+ else:
+ copyfile(py_executable, full_pth, symlink)
+
+ if is_win and ' ' in py_executable:
+ # There's a bug with subprocess on Windows when using a first
+ # argument that has a space in it. Instead we have to quote
+ # the value:
+ py_executable = '"%s"' % py_executable
+ # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
+ cmd = [py_executable, '-c', 'import sys;out=sys.stdout;'
+ 'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
+ logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
+ try:
+ proc = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE)
+ proc_stdout, proc_stderr = proc.communicate()
+ except OSError:
+ e = sys.exc_info()[1]
+ if e.errno == errno.EACCES:
+ logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
+ sys.exit(100)
+ else:
+ raise e
+
+ proc_stdout = proc_stdout.strip().decode("utf-8")
+ proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
+ norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
+ if hasattr(norm_home_dir, 'decode'):
+ norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding())
+ if proc_stdout != norm_home_dir:
+ logger.fatal(
+ 'ERROR: The executable %s is not functioning' % py_executable)
+ logger.fatal(
+ 'ERROR: It thinks sys.prefix is %r (should be %r)'
+ % (proc_stdout, norm_home_dir))
+ logger.fatal(
+ 'ERROR: virtualenv is not compatible with this system or executable')
+ if is_win:
+ logger.fatal(
+ 'Note: some Windows users have reported this error when they '
+ 'installed Python for "Only this user" or have multiple '
+ 'versions of Python installed. Copying the appropriate '
+ 'PythonXX.dll to the virtualenv Scripts/ directory may fix '
+ 'this problem.')
+ sys.exit(100)
+ else:
+ logger.info('Got sys.prefix result: %r' % proc_stdout)
+
+ pydistutils = os.path.expanduser('~/.pydistutils.cfg')
+ if os.path.exists(pydistutils):
+ logger.notify('Please make sure you remove any previous custom paths from '
+ 'your %s file.' % pydistutils)
+ ## FIXME: really this should be calculated earlier
+
+ fix_local_scheme(home_dir, symlink)
+
+ if site_packages:
+ if os.path.exists(site_packages_filename):
+ logger.info('Deleting %s' % site_packages_filename)
+ os.unlink(site_packages_filename)
+
+ return py_executable
+
+
+def install_activate(home_dir, bin_dir, prompt=None):
+ if is_win or (is_jython and os._name == 'nt'):
+ files = {
+ 'activate.bat': ACTIVATE_BAT,
+ 'deactivate.bat': DEACTIVATE_BAT,
+ 'activate.ps1': ACTIVATE_PS,
+ }
+
+ # MSYS needs paths of the form /c/path/to/file
+ drive, tail = os.path.splitdrive(home_dir.replace(os.sep, '/'))
+ home_dir_msys = ("/%s%s" if drive else "%s%s") % (drive[:1], tail)
+
+ # Run-time conditional enables (basic) Cygwin compatibility
+ home_dir_sh = ("""$(if [ "$OSTYPE" "==" "cygwin" ]; then cygpath -u '%s'; else echo '%s'; fi;)""" %
+ (home_dir, home_dir_msys))
+ files['activate'] = ACTIVATE_SH.replace('__VIRTUAL_ENV__', home_dir_sh)
+
+ else:
+ files = {'activate': ACTIVATE_SH}
+
+ # supplying activate.fish in addition to, not instead of, the
+ # bash script support.
+ files['activate.fish'] = ACTIVATE_FISH
+
+ # same for csh/tcsh support...
+ files['activate.csh'] = ACTIVATE_CSH
+
+ files['activate_this.py'] = ACTIVATE_THIS
+
+ install_files(home_dir, bin_dir, prompt, files)
+
+def install_files(home_dir, bin_dir, prompt, files):
+ if hasattr(home_dir, 'decode'):
+ home_dir = home_dir.decode(sys.getfilesystemencoding())
+ vname = os.path.basename(home_dir)
+ for name, content in files.items():
+ content = content.replace('__VIRTUAL_PROMPT__', prompt or '')
+ content = content.replace('__VIRTUAL_WINPROMPT__', prompt or '(%s)' % vname)
+ content = content.replace('__VIRTUAL_ENV__', home_dir)
+ content = content.replace('__VIRTUAL_NAME__', vname)
+ content = content.replace('__BIN_NAME__', os.path.basename(bin_dir))
+ writefile(os.path.join(bin_dir, name), content)
+
+def install_python_config(home_dir, bin_dir, prompt=None):
+ if sys.platform == 'win32' or (is_jython and os._name == 'nt'):
+ files = {}
+ else:
+ files = {'python-config': PYTHON_CONFIG}
+ install_files(home_dir, bin_dir, prompt, files)
+ for name, content in files.items():
+ make_exe(os.path.join(bin_dir, name))
+
+def install_distutils(home_dir):
+ distutils_path = change_prefix(distutils.__path__[0], home_dir)
+ mkdir(distutils_path)
+ ## FIXME: maybe this prefix setting should only be put in place if
+ ## there's a local distutils.cfg with a prefix setting?
+ home_dir = os.path.abspath(home_dir)
+ ## FIXME: this is breaking things, removing for now:
+ #distutils_cfg = DISTUTILS_CFG + "\n[install]\nprefix=%s\n" % home_dir
+ writefile(os.path.join(distutils_path, '__init__.py'), DISTUTILS_INIT)
+ writefile(os.path.join(distutils_path, 'distutils.cfg'), DISTUTILS_CFG, overwrite=False)
+
+def fix_local_scheme(home_dir, symlink=True):
+ """
+ Platforms that use the "posix_local" install scheme (like Ubuntu with
+ Python 2.7) need to be given an additional "local" location, sigh.
+ """
+ try:
+ import sysconfig
+ except ImportError:
+ pass
+ else:
+ if sysconfig._get_default_scheme() == 'posix_local':
+ local_path = os.path.join(home_dir, 'local')
+ if not os.path.exists(local_path):
+ os.mkdir(local_path)
+ for subdir_name in os.listdir(home_dir):
+ if subdir_name == 'local':
+ continue
+ copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)), \
+ os.path.join(local_path, subdir_name), symlink)
+
+def fix_lib64(lib_dir, symlink=True):
+ """
+ Some platforms (particularly Gentoo on x64) put things in lib64/pythonX.Y
+ instead of lib/pythonX.Y. If this is such a platform we'll just create a
+ symlink so lib64 points to lib
+ """
+ # PyPy's library path scheme is not affected by this.
+ # Return early or we will die on the following assert.
+ if is_pypy:
+ logger.debug('PyPy detected, skipping lib64 symlinking')
+ return
+ # Check we have a lib64 library path
+ if not [p for p in distutils.sysconfig.get_config_vars().values()
+ if isinstance(p, basestring) and 'lib64' in p]:
+ return
+
+ logger.debug('This system uses lib64; symlinking lib64 to lib')
+
+ assert os.path.basename(lib_dir) == 'python%s' % sys.version[:3], (
+ "Unexpected python lib dir: %r" % lib_dir)
+ lib_parent = os.path.dirname(lib_dir)
+ top_level = os.path.dirname(lib_parent)
+ lib_dir = os.path.join(top_level, 'lib')
+ lib64_link = os.path.join(top_level, 'lib64')
+ assert os.path.basename(lib_parent) == 'lib', (
+ "Unexpected parent dir: %r" % lib_parent)
+ if os.path.lexists(lib64_link):
+ return
+ if symlink:
+ os.symlink('lib', lib64_link)
+ else:
+ copyfile('lib', lib64_link)
+
+def resolve_interpreter(exe):
+ """
+ If the executable given isn't an absolute path, search $PATH for the interpreter
+ """
+ # If the "executable" is a version number, get the installed executable for
+ # that version
+ python_versions = get_installed_pythons()
+ if exe in python_versions:
+ exe = python_versions[exe]
+
+ if os.path.abspath(exe) != exe:
+ paths = os.environ.get('PATH', '').split(os.pathsep)
+ for path in paths:
+ if os.path.exists(join(path, exe)):
+ exe = join(path, exe)
+ break
+ if not os.path.exists(exe):
+ logger.fatal('The executable %s (from --python=%s) does not exist' % (exe, exe))
+ raise SystemExit(3)
+ if not is_executable(exe):
+ logger.fatal('The executable %s (from --python=%s) is not executable' % (exe, exe))
+ raise SystemExit(3)
+ return exe
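+ # For example, resolve_interpreter('python2.7') searches $PATH for that
+ # name, while resolve_interpreter('2.7') is first mapped to an installed
+ # executable via get_installed_pythons().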
+
+def is_executable(exe):
+ """Checks a file is executable"""
+ return os.access(exe, os.X_OK)
+
+############################################################
+## Relocating the environment:
+
+def make_environment_relocatable(home_dir):
+ """
+ Makes the already-existing environment use relative paths, and takes out
+ the #!-based environment selection in scripts.
+ """
+ home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+ activate_this = os.path.join(bin_dir, 'activate_this.py')
+ if not os.path.exists(activate_this):
+ logger.fatal(
+ 'The environment doesn\'t have a file %s -- please re-run virtualenv '
+ 'on this environment to update it' % activate_this)
+ fixup_scripts(home_dir, bin_dir)
+ fixup_pth_and_egg_link(home_dir)
+ ## FIXME: need to fix up distutils.cfg
+
+OK_ABS_SCRIPTS = ['python', 'python%s' % sys.version[:3],
+ 'activate', 'activate.bat', 'activate_this.py',
+ 'activate.fish', 'activate.csh']
+
+def fixup_scripts(home_dir, bin_dir):
+ if is_win:
+ new_shebang_args = (
+ '%s /c' % os.path.normcase(os.environ.get('COMSPEC', 'cmd.exe')),
+ '', '.exe')
+ else:
+ new_shebang_args = ('/usr/bin/env', sys.version[:3], '')
+
+ # This is what we expect at the top of scripts:
+ shebang = '#!%s' % os.path.normcase(os.path.join(
+ os.path.abspath(bin_dir), 'python%s' % new_shebang_args[2]))
+ # This is what we'll put:
+ new_shebang = '#!%s python%s%s' % new_shebang_args
+
+ for filename in os.listdir(bin_dir):
+ filename = os.path.join(bin_dir, filename)
+ if not os.path.isfile(filename):
+ # ignore subdirs, e.g. .svn ones.
+ continue
+ lines = None
+ with open(filename, 'rb') as f:
+ try:
+ lines = f.read().decode('utf-8').splitlines()
+ except UnicodeDecodeError:
+ # This is probably a binary program instead
+ # of a script, so just ignore it.
+ continue
+ if not lines:
+ logger.warn('Script %s is an empty file' % filename)
+ continue
+
+ old_shebang = lines[0].strip()
+ old_shebang = old_shebang[0:2] + os.path.normcase(old_shebang[2:])
+
+ if not old_shebang.startswith(shebang):
+ if os.path.basename(filename) in OK_ABS_SCRIPTS:
+ logger.debug('Cannot make script %s relative' % filename)
+ elif lines[0].strip() == new_shebang:
+ logger.info('Script %s has already been made relative' % filename)
+ else:
+ logger.warn('Script %s cannot be made relative (it\'s not a normal script that starts with %s)'
+ % (filename, shebang))
+ continue
+ logger.notify('Making script %s relative' % filename)
+ script = relative_script([new_shebang] + lines[1:])
+ with open(filename, 'wb') as f:
+ f.write('\n'.join(script).encode('utf-8'))
+
+
+def relative_script(lines):
+ "Return a script that'll work in a relocatable environment."
+ activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this"
+ # Find the last future statement in the script. If we insert the activation
+ # line before a future statement, Python will raise a SyntaxError.
+ activate_at = None
+ for idx, line in reversed(list(enumerate(lines))):
+ if line.split()[:3] == ['from', '__future__', 'import']:
+ activate_at = idx + 1
+ break
+ if activate_at is None:
+ # Activate after the shebang.
+ activate_at = 1
+ return lines[:activate_at] + ['', activate, ''] + lines[activate_at:]
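+ # For example, a script starting with a shebang followed by
+ # 'from __future__ import with_statement' gets the activate line inserted
+ # after the __future__ import, keeping the file syntactically valid.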
+
+def fixup_pth_and_egg_link(home_dir, sys_path=None):
+ """Makes .pth and .egg-link files use relative paths"""
+ home_dir = os.path.normcase(os.path.abspath(home_dir))
+ if sys_path is None:
+ sys_path = sys.path
+ for path in sys_path:
+ if not path:
+ path = '.'
+ if not os.path.isdir(path):
+ continue
+ path = os.path.normcase(os.path.abspath(path))
+ if not path.startswith(home_dir):
+ logger.debug('Skipping system (non-environment) directory %s' % path)
+ continue
+ for filename in os.listdir(path):
+ filename = os.path.join(path, filename)
+ if filename.endswith('.pth'):
+ if not os.access(filename, os.W_OK):
+ logger.warn('Cannot write .pth file %s, skipping' % filename)
+ else:
+ fixup_pth_file(filename)
+ if filename.endswith('.egg-link'):
+ if not os.access(filename, os.W_OK):
+ logger.warn('Cannot write .egg-link file %s, skipping' % filename)
+ else:
+ fixup_egg_link(filename)
+
+def fixup_pth_file(filename):
+ lines = []
+ prev_lines = []
+ with open(filename) as f:
+ prev_lines = f.readlines()
+ for line in prev_lines:
+ line = line.strip()
+ if (not line or line.startswith('#') or line.startswith('import ')
+ or os.path.abspath(line) != line):
+ lines.append(line)
+ else:
+ new_value = make_relative_path(filename, line)
+ if line != new_value:
+ logger.debug('Rewriting path %s as %s (in %s)' % (line, new_value, filename))
+ lines.append(new_value)
+ if lines == prev_lines:
+ logger.info('No changes to .pth file %s' % filename)
+ return
+ logger.notify('Making paths in .pth file %s relative' % filename)
+ with open(filename, 'w') as f:
+ f.write('\n'.join(lines) + '\n')
+
+def fixup_egg_link(filename):
+ with open(filename) as f:
+ link = f.readline().strip()
+ if os.path.abspath(link) != link:
+ logger.debug('Link in %s already relative' % filename)
+ return
+ new_link = make_relative_path(filename, link)
+ logger.notify('Rewriting link %s in %s as %s' % (link, filename, new_link))
+ with open(filename, 'w') as f:
+ f.write(new_link)
+
+def make_relative_path(source, dest, dest_is_directory=True):
+ """
+ Make a filename relative: rewrite ``dest`` as a path relative to the
+ directory containing ``source``, from which it is referenced.
+
+ >>> make_relative_path('/usr/share/something/a-file.pth',
+ ... '/usr/share/another-place/src/Directory')
+ '../another-place/src/Directory'
+ >>> make_relative_path('/usr/share/something/a-file.pth',
+ ... '/home/user/src/Directory')
+ '../../../home/user/src/Directory'
+ >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
+ './'
+ """
+ source = os.path.dirname(source)
+ if not dest_is_directory:
+ dest_filename = os.path.basename(dest)
+ dest = os.path.dirname(dest)
+ dest = os.path.normpath(os.path.abspath(dest))
+ source = os.path.normpath(os.path.abspath(source))
+ dest_parts = dest.strip(os.path.sep).split(os.path.sep)
+ source_parts = source.strip(os.path.sep).split(os.path.sep)
+ while dest_parts and source_parts and dest_parts[0] == source_parts[0]:
+ dest_parts.pop(0)
+ source_parts.pop(0)
+ full_parts = ['..']*len(source_parts) + dest_parts
+ if not dest_is_directory:
+ full_parts.append(dest_filename)
+ if not full_parts:
+ # Special case for the current directory (otherwise it'd be '')
+ return './'
+ return os.path.sep.join(full_parts)
+
+
+
+############################################################
+## Bootstrap script creation:
+
+def create_bootstrap_script(extra_text, python_version=''):
+ """
+ Creates a bootstrap script, which is like this script but with
+ extend_parser, adjust_options, and after_install hooks.
+
+ This returns a string that (written to disk of course) can be used
+ as a bootstrap script with your own customizations. The script
+ will be the standard virtualenv.py script, with your extra text
+ added (your extra text should be Python code).
+
+ If you include these functions, they will be called:
+
+ ``extend_parser(optparse_parser)``:
+ You can add or remove options from the parser here.
+
+ ``adjust_options(options, args)``:
+ You can change options here, or change the args (if you accept
+ different kinds of arguments, be sure you modify ``args`` so it is
+ only ``[DEST_DIR]``).
+
+ ``after_install(options, home_dir)``:
+
+ After everything is installed, this function is called. This
+ is probably the function you are most likely to use. An
+ example would be::
+
+ def after_install(options, home_dir):
+ subprocess.call([join(home_dir, 'bin', 'easy_install'),
+ 'MyPackage'])
+ subprocess.call([join(home_dir, 'bin', 'my-package-script'),
+ 'setup', home_dir])
+
+ This example immediately installs a package, and runs a setup
+ script from that package.
+
+ If you provide something like ``python_version='2.5'`` then the
+ script will start with ``#!/usr/bin/env python2.5`` instead of
+ ``#!/usr/bin/env python``. You can use this when the script must
+ be run with a particular Python version.
+ """
+ filename = __file__
+ if filename.endswith('.pyc'):
+ filename = filename[:-1]
+ with codecs.open(filename, 'r', encoding='utf-8') as f:
+ content = f.read()
+ py_exe = 'python%s' % python_version
+ content = (('#!/usr/bin/env %s\n' % py_exe)
+ + '## WARNING: This file is generated\n'
+ + content)
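+ # '##EXT' 'END##' below is two adjacent string literals; they concatenate
+ # to the marker without this line itself containing it, so the replace
+ # cannot match its own source.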
+ return content.replace('##EXT' 'END##', extra_text)
+
+##EXTEND##
+
+def convert(s):
+ b = base64.b64decode(s.encode('ascii'))
+ return zlib.decompress(b).decode('utf-8')
+
+##file site.py
+SITE_PY = convert("""
+eJzFPf1z2zaWv/OvwMqToZTKdOJ0e3tO3RsncVrfuYm3yc7m1vXoKAmyWFMkS5C2tTd3f/u9DwAE
++CHb2+6cphNLJPDw8PC+8PAeOhqNTopCZkuxyZd1KoWScblYiyKu1kqs8lJU66Rc7hdxWW3h6eIm
+vpZKVLlQWxVhqygInv/GT/BcfF4nyqAA3+K6yjdxlSziNN2KZFPkZSWXYlmXSXYtkiypkjhN/g4t
+8iwSz387BsFZJmDmaSJLcStLBXCVyFfiYlut80yM6wLn/DL6Y/xqMhVqUSZFBQ1KjTNQZB1XQSbl
+EtCElrUCUiaV3FeFXCSrZGEb3uV1uhRFGi+k+K//4qlR0zAMVL6Rd2tZSpEBMgBTAqwC8YCvSSkW
++VJGQryRixgH4OcNsQKGNsU1U0jGLBdpnl3DnDK5kErF5VaM53VFgAhlscwBpwQwqJI0De7y8kZN
+YElpPe7gkYiZPfzJMHvAPHH8LucAjh+z4C9Zcj9l2MA9CK5aM9uUcpXcixjBwk95Lxcz/WycrMQy
+Wa2ABlk1wSYBI6BEmswPClqOb/UKfXdAWFmujGEMiShzY35JPaLgrBJxqoBt6wJppAjzd3KexBlQ
+I7uF4QAikDToG2eZqMqOQ7MTOQAocR0rkJKNEuNNnGTArD/GC0L7r0m2zO/UhCgAq6XEL7Wq3PmP
+ewgArR0CTANcLLOadZYmNzLdTgCBz4B9KVWdVigQy6SUiyovE6kIAKC2FfIekJ6KuJSahMyZRm6n
+RH+iSZLhwqKAocDjSyTJKrmuS5IwsUqAc4Er3n/8Sbw7fXN28kHzmAHGMnu9AZwBCi20gxMMIA5q
+VR6kOQh0FJzjHxEvlyhk1zg+4NU0OHhwpYMxzL2I2n2cBQey68XVw8AcK1AmNFZA/f4bukzVGujz
+Pw+sdxCcDFGFJs7f7tY5yGQWb6RYx8xfyBnBtxrOd1FRrV8DNyiEUwGpFC4OIpggPCCJS7NxnklR
+AIulSSYnAVBoTm39VQRW+JBn+7TWLU4ACGWQwUvn2YRGzCRMtAvrNeoL03hLM9NNArvOm7wkxQH8
+ny1IF6VxdkM4KmIo/jaX10mWIULIC0G4F9LA6iYBTlxG4pxakV4wjUTI2otbokjUwEvIdMCT8j7e
+FKmcsviibt2tRmgwWQmz1ilzHLSsSL3SqjVT7eW9w+hLi+sIzWpdSgBezz2hW+X5VMxBZxM2Rbxh
+8arucuKcoEeeqBPyBLWEvvgdKHqiVL2R9iXyCmgWYqhgladpfgckOwoCIfawkTHKPnPCW3gH/wJc
+/DeV1WIdBM5IFrAGhcgPgUIgYBJkprlaI+Fxm2bltpJJMtYUebmUJQ31OGIfMOKPbIxzDT7klTZq
+PF1c5XyTVKiS5tpkJmzxsrBi/fia5w3TAMutiGamaUOnDU4vLdbxXBqXZC5XKAl6kV7bZYcxg54x
+yRZXYsNWBt4BWWTCFqRfsaDSWVWSnACAwcIXZ0lRp9RIIYOJGAbaFAR/E6NJz7WzBOzNZjlAhcTm
+ewH2B3D7O4jR3ToB+iwAAmgY1FKwfPOkKtFBaPRR4Bt905/HB049W2nbxEOu4iTVVj7OgjN6eFqW
+JL4LWWCvqSaGghlmFbp21xnQEcV8NBoFgXGHtsp8zVVQldsjYAVhxpnN5nWChm82Q1Ovf6iARxHO
+wF43287CAw1hOn0AKjldVmW+wdd2bp9AmcBY2CPYExekZSQ7yB4nvkbyuSq9ME3RdjvsLFAPBRc/
+nb4/+3L6SRyLy0alTdv67ArGPM1iYGuyCMBUrWEbXQYtUfElqPvEezDvxBRgz6g3ia+Mqxp4F1D/
+XNb0Gqax8F4Gpx9O3pyfzv7y6fSn2aezz6eAINgZGezRlNE81uAwqgiEA7hyqSJtX4NOD3rw5uST
+fRDMEjX75mtgN3gyvpYVMHE5hhlPRbiJ7xUwaDilphPEsdMALHg4mYjvxOHz568OCVqxLbYADMyu
+0xQfzrRFnyXZKg8n1PgXdumPWUlp/+3y6OsrcXwswl/i2zgMwIdqmjJL/Eji9HlbSOhawZ9xriZB
+sJQrEL0biQI6fk5+8YQ7wJJAy1zb6V/yJDPvmSvdIUh/jKkH4DCbLdJYKWw8m4VABOrQ84EOETvX
+KHVj6Fhs3a4TjQp+SgkLm2GXKf7Tg2I8p36IBqPodjGNQFw3i1hJbkXTh36zGeqs2WysBwRhJokB
+h4vVUChME9RZZQJ+LXEe6rC5ylP8ifBRC5AA4tYKtSQukt46RbdxWks1diYFRByPW2RERZso4kdw
+UcZgiZulm0za1DQ8A82AfGkOWrRsUQ4/e+DvgLoymzjc6PHei2mGmP477zQIB3A5Q1T3SrWgsHYU
+F6cX4tWLw310Z2DPubTU8ZqjhU6yWtqHK1gtIw+MMPcy8uLSZYV6Fp8e7Ya5iezKdFlhpZe4lJv8
+Vi4BW2RgZ5XFT/QGduYwj0UMqwh6nfwBVqHGb4xxH8qzB2lB3wGotyEoZv3N0u9xMEBmChQRb6yJ
+1HrXz6awKPPbBJ2N+Va/BFsJyhItpnFsAmfhPCZDkwgaArzgDCl1J0NQh2XNDivhjSDRXiwbxRoR
+uHPU1Ff09SbL77IZ74SPUemOJ5Z1UbA082KDZgn2xHuwQoBkDhu7hmgMBVx+gbK1D8jD9GG6QFna
+WwAgMPSKtmsOLLPVoynyrhGHRRiT14KEt5ToL9yaIWirZYjhQKK3kX1gtARCgslZBWdVg2YylDXT
+DAZ2SOJz3XnEW1AfQIuKEZjNsYbGjQz9Lo9AOYtzVyk5/dAif/nyhdlGrSm+gojNcdLoQqzIWEbF
+FgxrAjrBeGQcrSE2uAPnFsDUSrOm2P8k8oK9MVjPCy3b4AfA7q6qiqODg7u7u0hHF/Ly+kCtDv74
+p2+++dML1onLJfEPTMeRFh1qiw7oHXq00bfGAn1nVq7Fj0nmcyPBGkvyysgVRfy+r5NlLo72J1Z/
+Ihc3Zhr/Na4MKJCZGZSpDLQdNRg9U/vPoldqJJ6RdbZtxxP2S7RJtVbMt7rQo8rBEwC/ZZHXaKob
+TlDiK7BusENfynl9HdrBPRtpfsBUUU7Hlgf2X14hBj5nGL4ypniGWoLYAi2+Q/qfmG1i8o60hkDy
+oonq7J63/VrMEHf5eHm3vqYjNGaGiULuQInwmzxaAG3jruTgR7u2aPcc19Z8PENgLH1gmFc7lmMU
+HMIF12LqSp3D1ejxgjTdsWoGBeOqRlDQ4CTOmdoaHNnIEEGid2M2+7ywugXQqRU5NPEBswrQwh2n
+Y+3arOB4QsgDx+IlPZHgIh913r3gpa3TlAI6LR71qMKAvYVGO50DX44NgKkYlX8ZcUuzTfnYWhRe
+gx5gOceAkMFWHWbCN64PONob9bBTx+oP9WYa94HARRpzLOpR0AnlYx6hVCBNxdjvOcTilrjdwXZa
+HGIqs0wk0mpAuNrKo1eodhqmVZKh7nUWKVqkOXjFVisSIzXvfWeB9kH4uM+YaQnUZGjI4TQ6Jm/P
+E8BQt8Pw2XWNgQY3DoMYbRJF1g3JtIZ/wK2g+AYFo4CWBM2CeayU+RP7HWTOzld/GWAPS2hkCLfp
+kBvSsRgajnm/J5CMOhoDUpABCbvCSK4jq4MUOMxZIE+44bUclG6CESmQM8eCkJoB3Omlt8HBJxGe
+gJCEIuT7SslCfCVGsHxtUX2c7v5dudQEIcZOA3IVdPTi2I1sOFGN41aUw2doP75BZyVFDhw8B5fH
+DfS7bG6Y1gZdwFn3FbdFCjQyxWFGExfVK0MYN5j8h2OnRUMsM4hhKG8g70jHjDQJ7HJr0LDgBoy3
+5u2x9GM3YoF9x2GuDuXmHvZ/YZmoRa5Cipm0YxfuR3NFlzYW2/NkPoI/3gKMJlceJJnq+AVGWf6B
+QUIPetgH3ZsshkWWcXmXZCEpME2/Y39pOnhYUnpG7uATbacOYKIY8Tx4X4KA0NHnAYgTagLYlctQ
+abe/C3bnFEcWLncfeW7z5dGrqy5xp0MRHvvpX6rT+6qMFa5WyovGQoGr1TXgqHRhcnG21YeX+nAb
+twllrmAXKT5++iKQEBzXvYu3T5t6w/CIzYNz8j4GddBrD5KrNTtiF0AEtSIyykH4dI58PLJPndyO
+iT0ByJMYZseiGEiaT/4ROLsWCsbYX24zjKO1VQZ+4PU3X896IqMukt98PXpglBYx+sR+3PIE7cic
+VLBrtqWMU3I1nD4UVMwa1rFtignrc9r+aR676vE5NVo29t3fAj8GCobUJfgIL6YN2bpTxY/vTg3C
+03ZqB7DObtV89mgRYG+fz3+BHbLSQbXbOEnpXAEmv7+PytVs7jle0a89PEg7FYxDgr79l7p8AdwQ
+cjRh0p2OdsZOTMC5ZxdsPkWsuqjs6RyC5gjMywtwjz+HFU6ve+B7Bge/r7p8IiBvTqMeMmpbbIZ4
+wQclhz1K9gnzfvqMf9dZP27mw4L1/zHLF/+cST5hKgaaNh4+rH5iuXbXAHuEeRpwO3e4hd2h+axy
+ZZw7VklKPEfd9VzcUboCxVbxpAigLNnv64GDUqoPvd/WZclH16QCC1nu43HsVGCmlvH8ek3Mnjj4
+ICvExDZbUKzayevJ+4Qv1NFnO5Ow2Tf0c+c6NzErmd0mJfQFhTsOf/j442nYb0IwjgudHm9FHu83
+INwnMG6oiRM+pQ9T6Cld/nH10d66+AQ1GQEmIqzJ1iVsJxBs4gj9a/BARMg7sOVjdtyhL9ZycTOT
+lDqAbIpdnaD4W3yNmNiMAj//S8UrSmKDmSzSGmnFjjdmH67qbEHnI5UE/0qnCmPqECUEcPhvlcbX
+Ykydlxh60txI0anbuNTeZ1HmmJwq6mR5cJ0shfy1jlPc1svVCnDBwyv9KuLhKQIl3nFOAyctKrmo
+y6TaAglileuzP0p/cBrOtzzRsYckH/MwATEh4kh8wmnjeybc0pDLBAf8Ew+cJO67sYOTrBDRc3if
+5TMcdUY5vlNGqnsuT4+D9gg5ABgBUJj/aKIjd/4bSa/cA0Zac5eoqCU9UrqRhpycMYQynmCkg3/T
+T58RXd4awPJ6GMvr3Vhet7G87sXy2sfyejeWrkjgwtqglZGEvsBV+1ijN9/GjTnxMKfxYs3tMPcT
+czwBoijMBtvIFKdAe5EtPt8jIKS2nQNnetjkzyScVFrmHALXIJH78RBLb+ZN8rrTmbJxdGeeinFn
+h3KI/L4HUUSpYnPqzvK2jKs48uTiOs3nILYW3WkDYCra6UQcK81uZ3OO7rYs1ejiPz//8PEDNkdQ
+I5PeQN1wEdGw4FTGz+PyWnWlqdn8FcCO1NJPxKFuGuDeIyNrPMoe//OOMjyQccQdZSjkogAPgLK6
+bDM39ykMW891kpR+zkzOh03HYpRVo2ZSA0Q6ubh4d/L5ZEQhv9H/jlyBMbT1pcPFx7SwDbr+m9vc
+Uhz7gFDr2FZj/Nw5ebRuOOJhG2vAdjzf1oPDxxjs3jCBP8t/KqVgSYBQkQ7+PoVQj945/Kb9UIc+
+hhE7yX/uyRo7K/adI3uOi+KIft+xQ3sA/7AT9xgzIIB2ocZmZ9DslVtK35rXHRR1gD7S1/vNe832
+1qu9k/EpaifR4wA6lLXNht0/75yGjZ6S1ZvT788+nJ+9uTj5/IPjAqIr9/HTwaE4/fGLoPwQNGDs
+E8WYGlFhJhIYFrfQSSxz+K/GyM+yrjhIDL3enZ/rk5oNlrpg7jPanAiecxqThcZBM45C24c6/wgx
+SvUGyakponQdqjnC/dKG61lUrvOjqVRpjs5qrbdeulbM1JTRuXYE0geNXVIwCE4xg1eUxV6ZXWHJ
+J4C6zqoHKW2jbWJISkHBTrqAc/5lTle8QCl1hidNZ63oL0MX1/AqUkWawE7udWhlSXfD9JiGcfRD
+e8DNePVpQKc7jKwb8qwHsUCr9Trkuen+k4bRfq0Bw4bB3sG8M0npIZSBjcltIsRGfJITynv4apde
+r4GCBcODvgoX0TBdArOPYXMt1glsIIAn12B9cZ8AEFor4R8IHDnRAZljdkb4drPc/3OoCeK3/vnn
+nuZVme7/TRSwCxKcShT2ENNt/A42PpGMxOnH95OQkaPUXPHnGssDwCGhAKgj7ZS/xCfos7GS6Urn
+l/j6AF9oP4Fet7qXsih1937XOEQJeKbG5DU8U4Z+IaZ7WdhTnMqkBRorHyxmWEHopiGYz574tJZp
+qvPdz96dn4LviMUYKEF87nYKw3G8BI/QdfIdVzi2QOEBO7wukY1LdGEpyWIZec16g9YoctTby8uw
+60SB4W6vThS4jBPloj3GaTMsU04QISvDWphlZdZutUEKu22I4igzzBKzi5ISWH2eAF6mpzFviWCv
+hKUeJgLPp8hJVpmMxTRZgB4FlQsKdQpCgsTFekbivDzjGHheKlMGBQ+LbZlcrys83YDOEZVgYPMf
+T76cn32gsoTDV43X3cOcU9oJTDmJ5BhTBDHaAV/ctD/kqtmsj2f1K4SB2gf+tF9xdsoxD9Dpx4FF
+/NN+xXVox85OkGcACqou2uKBGwCnW5/cNLLAuNp9MH7cFMAGMx8MxSKx7EUnerjz63KibdkyJRT3
+MS+fcICzKmxKmu7spqS1P3qOqwLPuZbj/kbwtk+2zGcOXW86b4aS39xPRwqxJBYw6rb2xzDZYZ2m
+ejoOsw1xC21rtY39OXNipU67RYaiDEQcu50nLpP1K2HdnDnQS6PuABPfanSNJPaq8tHP2Uh7GB4m
+ltidfYrpSGUsZAQwkiF17U8NPhRaBFAglP07diR3Onl+6M3RsQYPz1HrLrCNP4Ai1Lm4VOORl8CJ
+8OVXdhz5FaGFevRIhI6nkskst3li+Llbo1f50p9jrwxQEBPFroyzazlmWFMD8yuf2AMhWNK2Hqkv
+k6s+wyLOwDm9H+Dwrlz0H5wY1FqM0Gl3I7dtdeSTBxv0loLsJJgPvozvQPcXdTXmlRw4h+6tpRuG
++jBEzD6Epvr0fRxiOObXcGB9GsC91NCw0MP7deDsktfGOLLWPraqmkL7QnuwixK2ZpWiYxmnONH4
+otYLaAzucWPyR/apThSyv3vqxJyYkAXKg7sgvbmNdINWOGHE5UpcOZpQOnxTTaPfLeWtTMFogJEd
+Y7XDL7baYRLZcEpvHthvxu5ie7Htx43eNJgdmXIMRIAKMXoDPbsQanDAFf5Z70Ti7Iac47d/PZuK
+tx9+gn/fyI9gQbHmcSr+BqOLt3kJ20ou2qXbFLCAo+L9Yl4rLIwkaHRCwRdPoLd24ZEXT0N0ZYlf
+UmIVpMBk2nLDt50AijxBKmRv3ANTLwG/TUFXywk1DmLfWoz0S6TBcI0L1oUc6JbRutqkaCac4Eiz
+iJej87O3px8+nUbVPTK2+Tlygid+HhZORx8Nl3gMNhX2yaLGJ1eOv/yDTIsed1nvNU29DO41RQjb
+kcLuL/kmjdjuKeISAwai2C7zRYQtgdO5RK+6A/954mwrH7TvnnFFWOOJPjxrnHh8DNQQP7f1zwga
+Uh89J+pJCMVzrBXjx9Go3wJPBUW04c/zm7ulGxDXRT80wTamzazHfnerAtdMZw3PchLhdWyXwdSB
+pkmsNvOFWx/4MRP6IhRQbnS8IVdxnVZCZrCVor093UgBCt4t6WMJYVZhK0Z1bhSdSe/irXJyj2Il
+RjjqiIrq8RyGAoWw9f4xvmEzgLWGouYSaIBOiNK2KXe6qnqxZgnmnRBRryff4C7JXrnJL5rCPChv
+jBeN/wrzRG+RMbqWlZ4/PxhPLl82CQ4UjF54Bb2LAoydyyZ7oDGL58+fj8S/Pez0MCpRmuc34I0B
+7F5n5ZxeDxhsPTm7Wl2H3ryJgB8Xa3kJD64oaG6f1xlFJHd0pQWR9q+BEeLahJYZTfuWOeZYXcnn
+y9yCz6m0wfhLltB1RxhRkqhs9a1RGG0y0kQsCYohjNUiSUKOTsB6bPMaa/Ewuqj5Rd4DxycIZopv
+8WCMd9hrdCwpb9Zyj0XnWIwI8IhSyng0KmamajTAc3ax1WjOzrKkaspIXrhnpvoKgMreYqT5SsR3
+KBlmHi1iOGWdHqs2jnW+k0W9jUq+uHTjjK1Z8uuHcAfWBknLVyuDKTw0i7TIZbkw5hRXLFkklQPG
+tEM43JkubyLrEwU9KI1AvZNVWFqJtm//YNfFxfQjHR/vm5F01lBlL8TimFCctfIKo6gZn6JPlpCW
+b82XCYzygaLZ2hPwxhJ/0LFUrCHw7u1wyxnrTN/HwWkbzSUdAIfugLIK0rKjpyOci8csfGbagVs0
+8EM7c8LtNimrOk5n+tqHGfppM3uervG0ZXA7CzyttwK+fQ6O777O2AfHwSTXID0x49ZUZByLlY5M
+RG5lmV+EVeTo5R2yrwQ+BVJmOTP10CZ2dGnZ1Raa6gRHR8UjqK9M8dKAQ26qZjoFJy7mU0pvMuUO
+A86zn29JV1eI78T41VQctnY+i2KLNzkBss+Woe+KUTeYihMMMHNs34shvjsW45dT8ccd0KOBAY4O
+3RHa+9gWhEEgr66eTMY0mRPZwr4U9of76hxG0PSM4+SqTf4umb4lKv1ri0pcIagTlV+2E5VbYw/u
+WzsfH8lwA4pjlcjl/jOFJNRIN7p5mMEJPyyg37M5Wrp2vKmoocK5OWxG7ho96GhE4zbbQUxRulZf
+XL+LuoYNp71zwKTJtFIV7S1zmMao0WsRFQDM+o7S8Bve7QLvNSlc/2zwiFUXAViwPREEXenJB2ZN
+w0ZQH3QEn6QBHmAUEeJhaqMoXMl6goiEdA8OMdFXrUNsh+N/d+bhEoOho9AOlt98vQtPVzB7izp6
+FnR3pYUnsra8ollu8+kPzHmM0tf1NwmMA6URHXBWzVWV5GYeYfYy30GT2yzmDV4GSSfTaBJT6bpN
+vJXmW7/Qj6HYASWTwVqAJ1Wv8CD5lu62PFGU9IZX1Hx9+HJqKoMZkJ7Aq+jVV/oKSOpmLj/wfeyp
+3rvBS93vMPoXB1hS+b3tq85uhqZ13LoLyh8spOjZJJpZOjSG6eE6kGbNYoF3JjbEZN/aXgDyHryd
+Ofg55vLTHBw22JBGfei6GqOR3iHVNiDAD5uMIcl5VNdGkSLSu4RtSHnuUpxPFgXdq9+CYAgBOX8d
+8xt0BeviyIbYjE3Bk8+xm82Jn+qmt+6M7Qka2+om3DV97r9r7rpFYGdukhk6c/frS10a6L7DVrSP
+Bhze0IR4VIlEo/H7jYlrB6Y6h6Y/Qq8/SH63E850wKw8BMZk7GC8n9hTY2/M/iZeuN8xIWyfL2R2
+y4l7nY3WtDs2o83xj/EUOPkFn9sbBiijaak5kPdLdMPejHNkZ/L6Ws1ivN1xRptsyufq7J7Mtu09
+Xc4nY7U1uy28tAhAGG7Smbducj0wBuhKvmWa06Gc22kEDU1Jw04WskqWbBL01g7ARRwxpf4mEM9p
+xKNUYqBb1WVRwm54pO8i5jydvtTmBqgJ4G1idWNQNz2m+mpaUqyUHGZKkDlO20ryASKwEe+YhtnM
+vgNeedFcs5BMLTPIrN7IMq6aK4b8jIAENl3NCFR0jovrhOcaqWxxiYtYYnnDQQoDZPb7V7Cx9DbV
+O+5VmFht93h2oh465PuUKxscY2S4OLm31wu611ot6Wpr1zu0zRqus1cqwTKYu/JIR+pYGb/V93fx
+HbMcyUf/0uEfkHe38tLPQrfqjL1bi4bzzFUI3Qub8MYAMs599zB2OKB742JrA2zH9/WFZZSOhznQ
+2FJR++S9CqcZbdJEkDBh9IEIkl8U8MQIkgf/kREkfWsmGBqNj9YDvWUCD4SaWD24V1A2jAB9ZkAk
+PMBuXWBoTOXYTbovcpXcj+yF0qwrnUo+Yx6QI7t3kxEIvmpSuRnK3lVwuyJIvnTR4+/PP745OSda
+zC5O3v7HyfeUlIXHJS1b9egQW5bvM7X3vfRvN9ymE2n6Bm+w7bkhlmuYNITO+04OQg+E/nq1vgVt
+KzL39VCHTt1PtxMgvnvaLahDKrsXcscv0zUmbvpMK0870E85qdb8cjITzCNzUsfi0JzEmffN4YmW
+0U5seWjhnPTWrjrR/qq+BXQg7j2xSda0Anhmgvxlj0xMxYwNzLOD0v7ffFBmOFYbmht0QAoX0rnJ
+kS5xZFCV//8TKUHZxbi3Y0dxau/mpnZ8PKTspfN49ruQkSGIV+436s7PFfalTAeoEASs8PQ9hYyI
+0X/6QNWmHzxT4nKfCov3Udlc2V+4Ztq5/WuCSQaVve9LcYISH7NC41WduokDtk+nAzl9dBqVr5xK
+FtB8B0DnRjwVsDf6S6wQ51sRwsZRu2SYHEt01Jf1Ocij3XSwN7R6IfaHyk7dskshXg43XLYqO3WP
+Q+6hHuihalPc51hgzNIcqicV3xFkPs4UdMGX53zgGbre9sPX28uXR/ZwAfkdXzuKhLLJRo5hv3Sy
+MXdeKul0J2Ypp5Suh3s1JySsW1w5UNknGNrbdEpSBvY/Js+BIY289/0hM9PDu3p/1MbUst4RTEmM
+n6kJTcsp4tG42yeT7nQbtdUFwgVJjwDSUYEAC8F0dKOTILrlLO/xC70bnNd0Ha97whQ6UkHJYj5H
+cA/j+zX4tbtTIfGjujOKpj83aHOgXnIQbvYduNXEC4UMm4T21Bs+GHABuCa7v//LR/TvpjHa7oe7
+/Grb6lVvHSD7spj5iplBLRKZxxEYGdCbY9LWWC5hBB2voWno6DJUMzfkC3T8KJsWL9umDQY5szPt
+AVijEPwfucjncQ==
+""")
+
+##file activate.sh
+ACTIVATE_SH = convert("""
+eJytVd9v2kAMfs9fYQLq2m4MscdNVKMqEkgtVIQxbeuUHolpTgsXdHehpT/+9/mSEBJS2MOaB0ji
+z77P9menDpOAK5jzEGERKw0zhFihD/dcB2CrKJYewoyLFvM0XzGNNpzOZbSAGVPBqVWHdRSDx4SI
+NMhYANfgc4meDteW5ePGC45P4MkCumKhUENzDsu1H3lw1vJx1RJxGMKns6O2lWDqINGgotAHFCsu
+I7FAoWHFJGezEFWGqsEvaD5C42naHb93X+A3+elYCgVaxgh8DmQAys9HL2SS0mIaWBgm7mTN/O3G
+kzu6vHCng/HkW/fSve5O+hTOpnhfQAcoEry5jKVjNypoO0fgwzKSOgHm79KUK06Jfc7/RebHpD8a
+9kdXvT2UcnuFWG6p0stNB0mWUUQ1q3uiGRVEMfXHR03dTuQATPjwqIIPcB9wL4CArRAY/ZHJixYL
+Y9YBtcAoLQtFevOoI9QaHcEdMSAB0d08kuZhyUiSmav6CPCdVBnFOjNrLu6yMCWgKRA0TInBC5i4
+QwX3JG/mm581GKnSsSSxJTFHf9MAKr8w5T/vOv1mUurn5/zlT6fvTntjZzAaNl9rQ5JkU5KIc0GX
+inagwU57T2eddqWlTrvaS6d9sImZeUMkhWysveF0m37NcGub9Dpgi0j4qGiOzATjDr06OBjOYQOo
+7RBoGtNm9Denv1i0LVI7lxJDXLHSSBeWRflsyyqw7diuW3h0XdvK6lBMyaoMG1UyHdTsoYBuue75
+YOgOu1c91/2cwYpznPPeDoQpGL2xSm09NKp7BsvQ2hnT3aMs07lUnskpxewvBk73/LLnXo9HV9eT
+ijB3hWBO2ygoiWg/bKuZxqCCQq0DD3vkWIVvI2KosIw+vqW1gIItEG5KJb+xb09g65ktwYKgTc51
+uGJ/EFQs0ayEWLCQM5V9N4g+1+8UbXOJzF8bqhKtIqIwicWvzNFROZJlpfD8A7Vc044R0FxkcezG
+VzsV75usvTdYef+57v5n1b225qhXfwEmxHEs
+""")
+
+##file activate.fish
+ACTIVATE_FISH = convert("""
+eJyFVVFv0zAQfs+vONJO3RDNxCsSQoMVrdK2Vl03CSHkesllMXLsYDvZivjx2GmTOG0YfWhV+7u7
+73z33Y1gnTENKeMIeakNPCKUGhP7xcQTbCJ4ZOKcxoZV1GCUMp1t4O0zMxkTQEGVQjicO4dTyIwp
+Ppyfu386Q86jWOZwBhq1ZlK8jYIRXEoQ0jhDYAYSpjA2fBsFQVoKG0UKSLAJB9MEJrMXi6uYMiXl
+KCrIZYJARQIKTakEGAkmQ+tU5ZSDRTAlRY7CRJMA7GdkgRoNSJ74t1BRxegjR12jWAoGbfpTAeGY
+LK4vycN8tb6/uCbLi/VVWGPcx3maPr2AO4VjYB+HMAxAkQT/i/ptfbW4vVrczAZit3eHDNqL13n0
+Ya+w+Tq/uyLL1eJmuSaLh9lqNb/0+IzgznqnAjAvzBa4jG0BNmNXfdJUkxTU2I6xRaKcy+e6VApz
+WVmoTGFTgwslrYdN03ONrbbMN1E/FQ7H7gOP0UxRjV67TPRBjF3naCMV1mSkYk9MUN7F8cODZzsE
+iIHYviIe6n8WeGQxWKuhl+9Xa49uijq7fehXMRxT9VR9f/8jhDcfYSKkSOyxKp22cNIrIk+nzd2b
+Yc7FNpHx8FUn15ZfzXEE98JxZEohx4r6kosCT+R9ZkHQtLmXGYSEeH8JCTvYkcRgXAutp9Rw7Jmf
+E/J5fktuL25m1tMe3vLdjDt9bNxr2sMo2P3C9BccqGeYhqfQITz6XurXaqdf99LF1mT2YJrvzqCu
+5w7dKvV3PzNyOb+7+Hw923dOuB+AX2SxrZs9Lm0xbCH6kmhjUyuWw+7cC7DX8367H3VzDz6oBtty
+tMIeobE21JT6HaRS+TbaoqhbE7rgdGs3xtE4cOF3xo0TfxwsdyRlhUoxuzes18r+Jp88zDx1G+kd
+/HTrr1BY2CeuyfnbQtAcu9j+pOw6cy9X0k3IuoyKCZPC5ESf6MkgHE5tLiSW3Oa+W2NnrQfkGv/h
+7tR5PNFnMBlw4B9NJTxnzKA9fLTT0aXSb5vw7FUKzcTZPddqYHi2T9/axJmEEN3qHncVCuEPaFmq
+uEtpcBj2Z1wjrqGReJBHrY6/go21NA==
+""")
+
+##file activate.csh
+ACTIVATE_CSH = convert("""
+eJx1U2FP2zAQ/e5f8TAV3Soo+0zXbYUiDQkKQgVp2ibjJNfFUuIg22nVf885SVFLO3+I7Lt3fr6X
+d8eY58ZjYQpCWfuAhFB7yrAyIYf0Ve1SQmLsuU6DWepAw9TnEoOFq0rwdjAUx/hV1Ui1tVWAqy1M
+QGYcpaFYx+yVI67LkKwx1UuTEaYGl4X2Bl+zJpAlP/6V2hTDtCq/DYXQhdEeGW040Q/Eb+t9V/e3
+U/V88zh/mtyqh8n8J47G+IKTE3gKZJdoYrK3h5MRU1tGYS83gqNc+3yEgyyP93cP820evHLvr2H8
+kaYB/peoyY7aVHzpJnE9e+6I5Z+ji4GMTNJWNuOQq6MA1N25p8pW9HWdVWlfsNpPDbdxjgpaahuw
+1M7opCA/FFu1uwxC7L8KUqmto1KyQe3rx0I0Eovdf7BVe67U5c1MzSZ310pddGheZoFPWyytRkzU
+aCA/I+RkBXhFXr5aWV0SxjhUI6jwdAj8kmhPzX7nTfJFkM3MImp2VdVFFq1vLHSU5szYQK4Ri+Jd
+xlW2JBtOGcyYVW7SnB3v6RS91g3gKapZ0oWxbHVteYIIq3iv7QeuSrUj6KSqQ+yqsxDj1ivNQxKF
+YON10Q+NH/ARS95i5Tuqq2Vxfvc23f/FO6zrtXXmJr+ZtMY9/A15ZXFWtmch2rEQ4g1ryVHH
+""")
+
+##file activate.bat
+ACTIVATE_BAT = convert("""
+eJx9Ul9LhEAQfxf8DoOclI/dYyFkaCmcq4gZQTBUrincuZFbff12T133TM+nnd35/Zvxlr7XDFhV
+mUZHOVhFlOWP3g4DUriIWoVomYZpNBWUtGpaWgImO191pFkSpzlcmgaI70jVX7n2Qp8tuByg+46O
+CMHbMq64T+nmlJt082D1T44muCDk2prgEHF4mdI9RaS/QwSt3zSyIAaftRccvqVTBziD1x/WlPD5
+xd729NDBb8Nr4DU9QNMKsJeH9pkhPedhQsIkDuCDCa6A+NF9IevVFAohkqizdHetg/tkWvPoftWJ
+MCqnOxv7/x7Np6yv9P2Ker5dmX8yNyCkkWnbZy3N5LarczlqL8htx2EM9rQ/2H5BvIsIEi8OEG8U
++g8CsNTr
+""")
+
+##file deactivate.bat
+DEACTIVATE_BAT = convert("""
+eJyFkN0KgkAUhO8F32EQpHqFQEjQUPAPMaErqVxzId3IrV6/XST/UDx3c86c4WMO5FYysKJQFVVp
+CEfqxsnJ9DI7SA25i20fFqs3HO+GYLsDZ7h8GM3xfLHrg1QNvpSX4CWpQGvokZk4uqrQAjXjyElB
+a5IjCz0r+2dHcehHCe5MZNmB5R7TdqMqECMptHZh6DN/utb7Zs6Cej8OXYE5J04YOKFvD4GkHuJ0
+pilSd1jG6n87tDZ+BUwUOepI6CGSkFMYWf0ihvT33Qj1A+tCkSI=
+""")
+
+##file activate.ps1
+ACTIVATE_PS = convert("""
+eJylWdmO41hyfW+g/0FTU7C7IXeJIqmtB/3AnZRIStxF2kaBm7gv4ipyMF/mB3+Sf8GXVGVl1tLT
+43ECSqR4b5wbETeWE8z/+a///vNCDaN6cYtSf5G1dbNw/IVXNIu6aCvX9xa3qsgWl0IJ/7IYinbh
+2nkOVqs2X0TNjz/8eeFFle826fBhQRaLBkD9uviw+LCy3Sbq7Mb/UNbrH3+YNtLcVaB+Xbipb+eL
+tly0eVsD/M6u6g8//vC+dquobH5VWU75eMFUdvHb4n02RHlXuHYTFfmHbHCLLLNz70NpN+GrBI4p
+1EeSk4FAXaZR88u0vPip8usi7fznt3fvP+OuPnx49/Pil4td+XnzigIAPoqYQH2J8v4z+C+8b98m
+Q25t7k76LIK0cOz0V89/MXXx0+Lf6z5q3PA/F+/FIif9uqnaadFf/PzXSXYBfqIb2NeApecJwPzI
+dlL/149nnvyoc7KqYfzTAT8v/voUmX7e+3n364tffl/oVaDyswKY/7J18e6bve8Wv9RuUfqfLHmK
+/u139Hwx+9ePRep97KKqae30YwmCo2y+0vTz1k+rv7159B3pb1SOGj97Pe8/flfkC1Vn/7xYR4n6
+lypNEGDDV5f7lcjil3S+4++p881Wv6qKyn5GQg1yJwcp4BZ5E+Wt/z1P/umbiHir4J8Xip/eFt6n
+9T/9gU9eY+7zUX97Jlmb136ziKrKT/3OzpvP8VX/+MObSP0lL3LvVZlJ9v1b8357jXyw8rXxYPXN
+11n4UzJ8G8S/vUbuJ6RPj999DbtS5kys//JusXwrNLnvT99cFlBNwXCe+niRz8JF/ezNr9Pze+H6
+18W7d5PPvozW7+387Zto/v4pL8BvbxTzvIW9KCv/Fj0WzVQb/YXbVlPZWTz3/9vCaRtQbPN/Bb+j
+2rUrDxTVD68gfQXu/ZewAFX53U/vf/rD2P3558W7+W79Po1y/xXoX/6RFHyNIoVjgAG4H0RTcAe5
+3bSVv3DSwk2mZYHjFB8zj6fC4sLOFTHJJQrwzFYJgso0ApOoBzFiRzzQKjIQCCbQMIFJGCKqGUyS
+8AkjiF2wTwmMEbcEUvq8Nj+X0f4YcCQmYRiOY7eRbAJDqzm1chOoNstbJ8oTBhZQ2NcfgaB6QjLp
+U4+SWFjQGCZpyqby8V4JkPGs9eH1BscXIrTG24QxXLIgCLYNsIlxSYLA6SjAeg7HAg4/kpiIB8k9
+TCLm0EM4gKIxEj8IUj2dQeqSxEwYVH88qiRlCLjEYGuNIkJB1BA5dHOZdGAoUFk54WOqEojkuf4Q
+Ig3WY+96TDlKLicMC04h0+gDCdYHj0kz2xBDj9ECDU5zJ0tba6RKgXBneewhBG/xJ5m5FX+WSzsn
+wnHvKhcOciw9NunZ0BUF0n0IJAcJMdcLqgQb0zP19dl8t9PzmMBjkuIF7KkvHgqEovUPOsY0PBB1
+HCtUUhch83qEJPjQcNQDsgj0cRqx2ZbnnlrlUjE1EX2wFJyyDa/0GLrmKDEFepdWlsbmVU45Wiwt
+eFM6mfs4kxg8yc4YmKDy67dniLV5FUeO5AKNPZaOQQ++gh+dXE7dbJ1aTDr7S4WPd8sQoQkDyODg
+XnEu/voeKRAXZxB/e2xaJ4LTFLPYEJ15Ltb87I45l+P6OGFA5F5Ix8A4ORV6M1NH1uMuZMnmFtLi
+VpYed+gSq9JDBoHc05J4OhKetrk1p0LYiKipxLMe3tYS7c5V7O1KcPU8BJGdLfcswhoFCSGQqJ8f
+ThyQKy5EWFtHVuNhvTnkeTc8JMpN5li3buURh0+3ZGuzdwM55kon+8urbintjdQJf9U1D0ah+hNh
+i1XNu4fSKbTC5AikGEaj0CYM1dpuli7EoqUt7929f1plxGGNZnixFSFP2qzhlZMonu2bB9OWSqYx
+VuHKWNGJI8kqUhMTRtk0vJ5ycZ60JlodlmN3D9XiEj/cG2lSt+WV3OtMgt1Tf4/Z+1BaCus740kx
+Nvj78+jMd9tq537Xz/mNFyiHb0HdwHytJ3uQUzKkYhK7wjGtx3oKX43YeYoJVtqDSrCnQFzMemCS
+2bPSvP+M4yZFi/iZhAjL4UOeMfa7Ex8HKBqw4umOCPh+imOP6yVTwG2MplB+wtg97olEtykNZ6wg
+FJBNXSTJ3g0CCTEEMdUjjcaBDjhJ9fyINXgQVHhA0bjk9lhhhhOGzcqQSxYdj3iIN2xGEOODx4qj
+Q2xikJudC1ujCVOtiRwhga5nPdhe1gSa649bLJ0wCuLMcEYIeSy25YcDQHJb95nfowv3rQnin0fE
+zIXFkM/EwSGxvCCMgEPNcDp/wph1gMEa8Xd1qAWOwWZ/KhjlqzgisBpDDDXz9Cmov46GYBKHC4zZ
+84HJnXoTxyWNBbXV4LK/r+OEwSN45zBp7Cub3gIYIvYlxon5BzDgtPUYfXAMPbENGrI+YVGSeTQ5
+i8NMB5UCcC+YRGIBhgs0xhAGwSgYwywpbu4vpCSTdEKrsy8osXMUnHQYenQHbOBofLCNNTg3CRRj
+A1nXY2MZcjnXI+oQ2Zk+561H4CqoW61tbPKv65Y7fqc3TDUF9CA3F3gM0e0JQ0TPADJFJXVzphpr
+2FzwAY8apGCju1QGOiUVO5KV6/hKbtgVN6hRVwpRYtu+/OC6w2bCcGzZQ8NCc4WejNEjFxOIgR3o
+QqR1ZK0IaUxZ9nbL7GWJIjxBARUhAMnYrq/S0tVOjzlOSYRqeIZxaSaOBX5HSR3MFekOXVdUPbjX
+nru61fDwI8HRYPUS7a6Inzq9JLjokU6P6OzT4UCH+Nha+JrU4VqEo4rRHQJhVuulAnvFhYz5NWFT
+aS/bKxW6J3e46y4PLagGrCDKcq5B9EmP+s1QMCaxHNeM7deGEV3WPn3CeKjndlygdPyoIcNaL3dd
+bdqPs47frcZ3aNWQ2Tk+rjFR01Ul4XnQQB6CSKA+cZusD0CP3F2Ph0e78baybgioepG12luSpFXi
+bHbI6rGLDsGEodMObDG7uyxfCeU+1OiyXYk8fnGu0SpbpRoEuWdSUlNi5bd9nBxYqZGrq7Qa7zV+
+VLazLcelzzP9+n6+xUtWx9OVJZW3gk92XGGkstTJ/LreFVFF2feLpXGGuQqq6/1QbWPyhJXIXIMs
+7ySVlzMYqoPmnmrobbeauMIxrCr3sM+qs5HpwmmFt7SM3aRNQWpCrmeAXY28EJ9uc966urGKBL9H
+18MtDE5OX97GDOHxam11y5LCAzcwtkUu8wqWI1dWgHyxGZdY8mC3lXzbzncLZ2bIUxTD2yW7l9eY
+gBUo7uj02ZI3ydUViL7oAVFag37JsjYG8o4Csc5R7SeONGF8yZP+7xxi9scnHvHPcogJ44VH/LMc
+Yu6Vn3jEzCFw9Eqq1ENQAW8aqbUwSiAqi+nZ+OkZJKpBL66Bj8z+ATqb/8qDIJUeNRTwrI0YrVmb
+9FArKVEbCWUNSi8ipfVv+STgkpSsUhcBg541eeKLoBpLGaiHTNoK0r4nn3tZqrcIULtq20Df+FVQ
+Sa0MnWxTugMuzD410sQygF4qdntbswiJMqjs014Irz/tm+pd5oygJ0fcdNbMg165Pqi7EkYGAXcB
+dwxioCDA3+BY9+JjuOmJu/xyX2GJtaKSQcOZxyqFzTaa6/ot21sez0BtKjirROKRm2zuai02L0N+
+ULaX8H5P6VwsGPbYOY7sAy5FHBROMrMzFVPYhFHZ7M3ZCZa2hsT4jGow6TGtG8Nje9405uMUjdF4
+PtKQjw6yZOmPUmO8LjFWS4aPCfE011N+l3EdYq09O3iQJ9a01B3KXiMF1WmtZ+l1gmyJ/ibAHZil
+vQzdOl6g9PoSJ4TM4ghTnTndEVMOmsSSu+SCVlGCOLQRaw9oLzamSWP62VuxPZ77mZYdfTRGuNBi
+KyhZL32S2YckO/tU7y4Bf+QKKibQSKCTDWPUwWaE8yCBeL5FjpbQuAlb53mGX1jptLeRotREbx96
+gnicYz0496dYauCjpTCA4VA0cdLJewzRmZeTwuXWD0talJsSF9J1Pe72nkaHSpULgNeK1+o+9yi0
+YpYwXZyvaZatK2eL0U0ZY6ekZkFPdC8JTF4Yo1ytawNfepqUKEhwznp6HO6+2l7L2R9Q3N49JMIe
+Z+ax1mVaWussz98QbNTRPo1xu4W33LJpd9H14dd66ype7UktfEDi3oUTccJ4nODjwBKFxS7lYWiq
+XoHu/b7ZVcK5TbRD0F/2GShg2ywwUl07k4LLqhofKxFBNd1grWY+Zt/cPtacBpV9ys2z1moMLrT3
+W0Elrjtt5y/dvDQYtObYS97pqj0eqmwvD3jCPRqamGthLiF0XkgB6IdHLBBwDGPiIDh7oPaRmTrN
+tYA/yQKFxRiok+jM6ciJq/ZgiOi5+W4DEmufPEubeSuYJaM3/JHEevM08yJAXUQwb9LS2+8FOfds
+FfOe3Bel6EDSjIEIKs4o9tyt67L1ylQlzhe0Q+7ue/bJnWMcD3q6wDSIQi8ThnRM65aqLWesi/ZM
+xhHmQvfKBbWcC194IPjbBLYR9JTPITbzwRcu+OSFHDHNSYCLt29sAHO6Gf0h/2UO9Xwvhrjhczyx
+Ygz6CqP4IwxQj5694Q1Pe2IR+KF/yy+5PvCL/vgwv5mPp9n4kx7fnY/nmV++410qF/ZVCMyv5nAP
+pkeOSce53yJ6ahF4aMJi52by1HcCj9mDT5i+7TF6RoPaLL+cN1hXem2DmX/mdIbeeqwQOLD5lKO/
+6FM4x77w6D5wMx3g0IAfa2D/pgY9a7bFQbinLDPz5dZi9ATIrd0cB5xfC0BfCCZO7TKP0jQ2Meih
+nRXhkA3smTAnDN9IW2vA++lsgNuZ2QP0UhqyjUPrDmgfWP2bWWiKA+YiEK7xou8cY0+d3/bk0oHR
+QLrq4KzDYF/ljQDmNhBHtkVNuoDey6TTeaD3SHO/Bf4d3IwGdqQp6FuhmwFbmbQBssDXVKDBYOpk
+Jy7wxOaSRwr0rDmGbsFdCM+7XU/84JPu3D/gW7QXgzlvbjixn99/8CpWFUQWHFEz/RyXvzNXTTOd
+OXLNNFc957Jn/YikNzEpUdRNxXcC6b76ccTwMGoKj5X7c7TvHFgc3Tf4892+5A+iR+D8OaaE6ACe
+gdgHcyCoPm/xiDCWP+OZRjpzfj5/2u0i4qQfmIEOsTV9Hw6jZ3Agnh6hiwjDtGYxWvt5TiWEuabN
+77YCyRXwO8P8wdzG/8489KwfFBZWI6Vvx76gmlOc03JI1HEfXYZEL4sNFQ3+bqf7e2hdSWQknwKF
+ICJjGyDs3fdmnnxubKXebpQYLjPgEt9GTzKkUgTvOoQa1J7N3nv4sR6uvYFLhkXZ+pbCoU3K9bfq
+gF7W82tNutRRZExad+k4GYYsCfmEbvizS4jsRr3fdzqjEthpEwm7pmN7OgVzRbrktjrFw1lc0vM8
+V7dyTJ71qlsd7v3KhmHzeJB35pqEOk2pEe5uPeCToNkmedmxcKbIj+MZzjFSsvCmimaMQB1uJJKa
++hoWUi7aEFLvIxKxJavqpggXBIk2hr0608dIgnfG5ZEprqmH0b0YSy6jVXTCuIB+WER4d5BPVy9Q
+M4taX0RIlDYxQ2CjBuq78AAcHQf5qoKP8BXHnDnd/+ed5fS+csL4g3eWqECaL+8suy9r8hx7c+4L
+EegEWdqAWN1w1NezP34xsxLkvRRI0DRzKOg0U+BKfQY128YlYsbwSczEg2LqKxRmcgiwHdhc9MQJ
+IwKQHlgBejWeMGDYYxTOQUiJOmIjJbzIzHH6lAMP+y/fR0v1g4wx4St8fcqTt3gz5wc+xXFZZ3qI
+JpXI5iJk7xmNL2tYsDpcqu0375Snd5EKsIvg8u5szTOyZ4v06Ny2TZXRpHUSinh4IFp8Eoi7GINJ
+02lPJnS/9jSxolJwp2slPMIEbjleWw3eec4XaetyEnSSqTPRZ9fVA0cPXMqzrPYQQyrRux3LaAh1
+wujbgcObg1nt4iiJ5IMbc/WNPc280I2T4nTkdwG8H6iS5xO2WfsFsruBwf2QkgZlb6w7om2G65Lr
+r2Gl4dk63F8rCEHoUJ3fW+pU2Srjlmcbp+JXY3DMifEI22HcHAvT7zzXiMTr7VbUR5a2lZtJkk4k
+1heZZFdru8ucCWMTr3Z4eNnjLm7LW7rcN7QjMpxrsCzjxndeyFUX7deIs3PQkgyH8k6luI0uUyLr
+va47TBjM4JmNHFzGPcP6BV6cYgQy8VQYZe5GmzZHMxyBYhGiUdekZQ/qwyxC3WGylQGdUpSf9ZCP
+a7qPdJd31fPRC0TOgzupO7nLuBGr2A02yuUQwt2KQG31sW8Gd9tQiHq+hPDt4OzJuY4pS8XRsepY
+tsd7dVEfJFmc15IYqwHverrpWyS1rFZibDPW1hUUb+85CGUzSBSTK8hpvee/ZxonW51TUXekMy3L
+uy25tMTg4mqbSLQQJ+skiQu2toIfBFYrOWql+EQipgfT15P1aq6FDK3xgSjIGWde0BPftYchDTdM
+i4QdudHFkN0u6fSKiT09QLv2mtSblt5nNzBR6UReePNs+khE4rHcXuoK21igUKHl1c3MXMgPu7y8
+rKQDxR6N/rffXv+lROXet/9Q+l9I4D1U
+""")
+
+##file distutils-init.py
+DISTUTILS_INIT = convert("""
+eJytV1uL4zYUfvevOE0ottuMW9q3gVDa3aUMXXbLMlDKMBiNrSTqOJKRlMxkf33PkXyRbGe7Dw2E
+UXTu37lpxLFV2oIyifAncxmOL0xLIfcG+gv80x9VW6maw7o/CANSWWBwFtqeWMPlGY6qPjV8A0bB
+C4eKSTgZ5LRgFeyErMEeOBhbN+Ipgeizhjtnhkn7DdyjuNLPoCS0l/ayQTG0djwZC08cLXozeMss
+aG5EzQ0IScpnWtHSTXuxByV/QCmxE7y+eS0uxWeoheaVVfqSJHiU7Mhhi6gULbOHorshkrEnKxpT
+0n3A8Y8SMpuwZx6aoix3ouFlmW8gHRSkeSJ2g7hU+kiHLDaQw3bmRDaTGfTnty7gPm0FHbIBg9U9
+oh1kZzAFLaue2R6htPCtAda2nGlDSUJ4PZBgCJBGVcwKTAMz/vJiLD+Oin5Z5QlvDPdulC6EsiyE
+NFzb7McNTKJzbJqzphx92VKRFY1idenzmq3K0emRcbWBD0ryqc4NZGmKOOOX9Pz5x+/l27tP797c
+f/z0d+4NruGNai8uAM0bfsYaw8itFk8ny41jsfpyO+BWlpqfhcG4yxLdi/0tQqoT4a8Vby382mt8
+p7XSo7aWGdPBc+b6utaBmCQ7rQKQoWtAuthQCiold2KfJIPTT8xwg9blPumc+YDZC/wYGdAyHpJk
+vUbHbHWAp5No6pK/WhhLEWrFjUwtPEv1Agf8YmnsuXUQYkeZoHm8ogP16gt2uHoxcEMdf2C6pmbw
+hUMsWGhanboh4IzzmsIpWs134jVPqD/c74bZHdY69UKKSn/+KfVhxLgUlToemayLMYQOqfEC61bh
+cbhwaqoGUzIyZRFHPmau5juaWqwRn3mpWmoEA5nhzS5gog/5jbcFQqOZvmBasZtwYlG93k5GEiyw
+buHhMWLjDarEGpMGB2LFs5nIJkhp/nUmZneFaRth++lieJtHepIvKgx6PJqIlD9X2j6pG1i9x3pZ
+5bHuCPFiirGHeO7McvoXkz786GaKVzC9DSpnOxJdc4xm6NSVq7lNEnKdVlnpu9BNYoKX2Iq3wvgh
+gGEUM66kK6j4NiyoneuPLSwaCWDxczgaolEWpiMyDVDb7dNuLAbriL8ig8mmeju31oNvQdpnvEPC
+1vAXbWacGRVrGt/uXN/gU0CDDwgooKRrHfTBb1/s9lYZ8ZqOBU0yLvpuP6+K9hLFsvIjeNhBi0KL
+MlOuWRn3FRwx5oHXjl0YImUx0+gLzjGchrgzca026ETmYJzPD+IpuKzNi8AFn048Thd63OdD86M6
+84zE8yQm0VqXdbbgvub2pKVnS76icBGdeTHHXTKspUmr4NYo/furFLKiMdQzFjHJNcdAnMhltBJK
+0/IKX3DVFqvPJ2dLE7bDBkH0l/PJ29074+F0CsGYOxsb7U3myTUncYfXqnLLfa6sJybX4g+hmcjO
+kMRBfA1JellfRRKJcyRpxdS4rIl6FdmQCWjo/o9Qz7yKffoP4JHjOvABcRn4CZIT2RH4jnxmfpVG
+qgLaAvQBNfuO6X0/Ux02nb4FKx3vgP+XnkX0QW9pLy/NsXgdN24dD3LxO2Nwil7Zlc1dqtP3d7/h
+kzp1/+7hGBuY4pk0XD/0Ao/oTe/XGrfyM773aB7iUhgkpy+dwAMalxMP0DrBcsVw/6p25+/hobP9
+GBknrWExDhLJ1bwt1NcCNblaFbMKCyvmX0PeRaQ=
+""")
+
+##file distutils.cfg
+DISTUTILS_CFG = convert("""
+eJxNj00KwkAMhfc9xYNuxe4Ft57AjYiUtDO1wXSmNJnK3N5pdSEEAu8nH6lxHVlRhtDHMPATA4uH
+xJ4EFmGbvfJiicSHFRzUSISMY6hq3GLCRLnIvSTnEefN0FIjw5tF0Hkk9Q5dRunBsVoyFi24aaLg
+9FDOlL0FPGluf4QjcInLlxd6f6rqkgPu/5nHLg0cXCscXoozRrP51DRT3j9QNl99AP53T2Q=
+""")
+
+##file activate_this.py
+ACTIVATE_THIS = convert("""
+eJyNU01v2zAMvetXEB4K21jnDOstQA4dMGCHbeihlyEIDMWmE62yJEiKE//7kXKdpEWLzYBt8evx
+kRSzLPs6wiEoswM8YdMpjUXcq1Dz6RZa1cSiTkJdr86GsoTRHuCotBayiWqQEYGtMCgfD1KjGYBe
+5a3p0cRKiEe2NtLAFikftnDco0ko/SFEVgEZ8aRCZDIPY9xbA8pE9M4jfW/B2CjiHq9zbJVZuOQq
+siwTIvpxKYCembPAU4Muwi/Z4zfvrZ/MXipKeB8C+qisSZYiWfjJfs+0/MFMdWn1hJcO5U7G/SLa
+xVx8zU6VG/PXLXvfsyyzUqjeWR8hjGE+2iCE1W1tQ82hsCJN9dzKaoexyB/uH79TnjwvxcW0ntSb
+yZ8jq1Z5Q1UXsyy3gf9nbjTEj7NzQMfCJa/YSmrQ+2D/BqfiOi6sclrGzvoeVivIj8rcfcmnIQRF
+7XCyeZI7DFe5/lhlCs5PRf5QW66VXT/NrlQ46oD/D6InkOmi3IQcbhKxAX2g4a+Xd5s3UtCtG2py
+m8eg6WYWqR6SL5OjKMGfSrYt/6kxxQtOpeAgj1LXBNmpE2ElmCSIy5H0zFd8gJ924HWijWhb2hRC
+6wNEm1QdDZtuSZcEprIUBo/XRNcbQe1OUbQ/r3hPTaPJJDNtFLu8KHV5XoNr3Eo6h6YtOKw8e8yw
+VF5PnJ+ts3a9/Mz38RpG/AUSzYUW
+""")
+
+##file python-config
+PYTHON_CONFIG = convert("""
+eJyNVV1P2zAUfc+v8ODBiSABxlulTipbO6p1LWqBgVhlhcZpPYUkctzSivHfd6+dpGloGH2Ja/ue
+e+65Hz78xNhtf3x90xmw7vCWsRPGLvpDNuz87MKfdKMWSWxZ4ilNpCLZJiuWc66SVFUOZkkcirll
+rfxIBAzOMtImDzSVPBRrekwoX/OZu/0r4lm0DHiG60g86u8sjPw5rCyy86NRkB8QuuBRSqfAKESn
+3orLTCQxE3GYkC9tYp8fk89OSwNsmXgizrhUtnumeSgeo5GbLUMk49Rv+2nK48Cm/qMwfp333J2/
+dVcAGE0CIQHBsgIeEr4Wij0LtWDLzJ9ze5YEvH2WI6CHTAVcSu9ZCsXtgxu81CIvp6/k4eXsdfo7
+PvDCRD75yi41QitfzlcPp1OI7i/1/iQitqnr0iMgQ+A6wa+IKwwdxyk9IiXNAzgquTFU8NIxAVjM
+osm1Zz526e+shQ4hKRVci69nPC3Kw4NQEmkQ65E7OodxorSvxjvpBjQHDmWFIQ1mlmzlS5vedseT
+/mgIEsMJ7Lxz2bLAF9M5xeLEhdbHxpWOw0GdkJApMVBRF1y+a0z3c9WZPAXGFcFrJgCIB+024uad
+0CrzmEoRa3Ub4swNIHPGf7QDV+2uj2OiFWsChgCwjKqN6rp5izpbH6Wc1O1TclQTP/XVwi6anTr1
+1sbubjZLI1+VptPSdCfwnFBrB1jvebrTA9uUhU2/9gad7xPqeFkaQcnnLbCViZK8d7R1kxzFrIJV
+8EaLYmKYpvGVkig+3C5HCXbM1jGCGekiM2pRCVPyRyXYdPf6kcbWEQ36F5V4Gq9N7icNNw+JHwRE
+LTgxRXACpvnQv/PuT0xCCAywY/K4hE6Now2qDwaSE5FB+1agsoUveYDepS83qFcF1NufvULD3fTl
+g6Hgf7WBt6lzMeiyyWVn3P1WVbwaczHmTzE9A5SyItTVgFYyvs/L/fXlaNgbw8v3azT+0eikVlWD
+/vBHbzQumP23uBCjsYdrL9OWARwxs/nuLOzeXbPJTa/Xv6sUmQir5pC1YRLz3eA+CD8Z0XpcW8v9
+MZWF36ryyXXf3yBIz6nzqz8Muyz0m5Qj7OexfYo/Ph3LqvkHUg7AuA==
+""")
+
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+FAT_MAGIC = 0xcafebabe
+BIG_ENDIAN = '>'
+LITTLE_ENDIAN = '<'
+LC_LOAD_DYLIB = 0xc
+maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint')
+
+
+class fileview(object):
+ """
+ A proxy for file-like objects that exposes a given view of a file.
+ Modified from macholib.
+ """
+
+ def __init__(self, fileobj, start=0, size=maxint):
+ if isinstance(fileobj, fileview):
+ self._fileobj = fileobj._fileobj
+ else:
+ self._fileobj = fileobj
+ self._start = start
+ self._end = start + size
+ self._pos = 0
+
+ def __repr__(self):
+ return '<fileview [%d, %d] %r>' % (
+ self._start, self._end, self._fileobj)
+
+ def tell(self):
+ return self._pos
+
+ def _checkwindow(self, seekto, op):
+ if not (self._start <= seekto <= self._end):
+ raise IOError("%s to offset %d is outside window [%d, %d]" % (
+ op, seekto, self._start, self._end))
+
+ def seek(self, offset, whence=0):
+ seekto = offset
+ if whence == os.SEEK_SET:
+ seekto += self._start
+ elif whence == os.SEEK_CUR:
+ seekto += self._start + self._pos
+ elif whence == os.SEEK_END:
+ seekto += self._end
+ else:
+ raise IOError("Invalid whence argument to seek: %r" % (whence,))
+ self._checkwindow(seekto, 'seek')
+ self._fileobj.seek(seekto)
+ self._pos = seekto - self._start
+
+ def write(self, bytes):
+ here = self._start + self._pos
+ self._checkwindow(here, 'write')
+ self._checkwindow(here + len(bytes), 'write')
+ self._fileobj.seek(here, os.SEEK_SET)
+ self._fileobj.write(bytes)
+ self._pos += len(bytes)
+
+ def read(self, size=maxint):
+ assert size >= 0
+ here = self._start + self._pos
+ self._checkwindow(here, 'read')
+ size = min(size, self._end - here)
+ self._fileobj.seek(here, os.SEEK_SET)
+ bytes = self._fileobj.read(size)
+ self._pos += len(bytes)
+ return bytes
+
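+# A minimal sketch (added for illustration, defined but never called): how
+# fileview restricts I/O to a window of a larger stream. Only the class above
+# and the stdlib are assumed.
+def _demo_fileview():
+    import io
+    buf = io.BytesIO(b'0123456789abcdef')
+    view = fileview(buf, start=4, size=8)  # window covers bytes 4..11
+    view.seek(0)                           # offsets are window-relative
+    assert view.read(4) == b'4567'
+    assert view.read() == b'89ab'          # reads never leave the window
+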
+
+def read_data(file, endian, num=1):
+ """
+ Read a given number of 32-bit unsigned integers from the given file
+ with the given endianness.
+ """
+ res = struct.unpack(endian + 'L' * num, file.read(num * 4))
+ if len(res) == 1:
+ return res[0]
+ return res
+
+
+def mach_o_change(path, what, value):
+ """
+ Replace a given name (what) in any LC_LOAD_DYLIB command found in
+ the given binary with a new name (value), provided the new name is
+ no longer than the old one.
+ """
+
+ def do_macho(file, bits, endian):
+ # Read Mach-O header (the magic number is assumed read by the caller)
+ cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = read_data(file, endian, 6)
+ # The 64-bit header has one extra field; read and discard it.
+ if bits == 64:
+ read_data(file, endian)
+ # The header is followed by ncmds commands
+ for n in range(ncmds):
+ where = file.tell()
+ # Read command header
+ cmd, cmdsize = read_data(file, endian, 2)
+ if cmd == LC_LOAD_DYLIB:
+ # The first data field in LC_LOAD_DYLIB commands is the
+ # offset of the name, starting from the beginning of the
+ # command.
+ name_offset = read_data(file, endian)
+ file.seek(where + name_offset, os.SEEK_SET)
+ # Read the NUL terminated string
+ load = file.read(cmdsize - name_offset).decode()
+ load = load[:load.index('\0')]
+ # If the string is what is being replaced, overwrite it.
+ if load == what:
+ file.seek(where + name_offset, os.SEEK_SET)
+ file.write(value.encode() + '\0'.encode())
+ # Seek to the next command
+ file.seek(where + cmdsize, os.SEEK_SET)
+
+ def do_file(file, offset=0, size=maxint):
+ file = fileview(file, offset, size)
+ # Read magic number
+ magic = read_data(file, BIG_ENDIAN)
+ if magic == FAT_MAGIC:
+ # Fat binaries contain nfat_arch Mach-O binaries
+ nfat_arch = read_data(file, BIG_ENDIAN)
+ for n in range(nfat_arch):
+ # Read arch header
+ cputype, cpusubtype, offset, size, align = read_data(file, BIG_ENDIAN, 5)
+ do_file(file, offset, size)
+ elif magic == MH_MAGIC:
+ do_macho(file, 32, BIG_ENDIAN)
+ elif magic == MH_CIGAM:
+ do_macho(file, 32, LITTLE_ENDIAN)
+ elif magic == MH_MAGIC_64:
+ do_macho(file, 64, BIG_ENDIAN)
+ elif magic == MH_CIGAM_64:
+ do_macho(file, 64, LITTLE_ENDIAN)
+
+ assert(len(what) >= len(value))
+
+ with open(path, 'r+b') as f:
+ do_file(f)
+
+
+if __name__ == '__main__':
+ main()
+
+# TODO:
+# Copy python.exe.manifest
+# Monkeypatch distutils.sysconfig
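+
+# A minimal usage sketch (added for illustration, never called; all paths are
+# hypothetical): retarget a dylib reference recorded in a Mach-O binary. The
+# replacement must not be longer than the original name (see the assert above).
+def _demo_mach_o_change():
+    old = '/very/long/build/prefix/lib/libpython2.7.dylib'  # hypothetical
+    new = '@rpath/libpython2.7.dylib'                       # shorter, so OK
+    mach_o_change('/path/to/venv/bin/python', old, new)     # hypothetical path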
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.bat b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.bat
new file mode 100644
index 000000000..529b9733c
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.bat
@@ -0,0 +1,30 @@
+@echo off
+set "VIRTUAL_ENV=__VIRTUAL_ENV__"
+
+if defined _OLD_VIRTUAL_PROMPT (
+ set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+) else (
+ if not defined PROMPT (
+ set "PROMPT=$P$G"
+ )
+ set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
+)
+set "PROMPT=__VIRTUAL_WINPROMPT__ %PROMPT%"
+
+REM Don't use () to avoid problems with them in %PATH%
+if defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
+ set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
+:ENDIFVHOME
+
+set PYTHONHOME=
+
+REM if defined _OLD_VIRTUAL_PATH (
+if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH1
+ set "PATH=%_OLD_VIRTUAL_PATH%"
+:ENDIFVPATH1
+REM ) else (
+if defined _OLD_VIRTUAL_PATH goto ENDIFVPATH2
+ set "_OLD_VIRTUAL_PATH=%PATH%"
+:ENDIFVPATH2
+
+set "PATH=%VIRTUAL_ENV%\__BIN_NAME__;%PATH%"
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.csh b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.csh
new file mode 100644
index 000000000..864865b17
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.csh
@@ -0,0 +1,36 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+
+
+
+if ("__VIRTUAL_PROMPT__" != "") then
+ set env_name = "__VIRTUAL_PROMPT__"
+else
+ set env_name = `basename "$VIRTUAL_ENV"`
+endif
+
+# Could be in a non-interactive environment,
+# in which case, $prompt is undefined and we wouldn't
+# care about the prompt anyway.
+if ( $?prompt ) then
+ set _OLD_VIRTUAL_PROMPT="$prompt"
+ set prompt = "[$env_name] $prompt"
+endif
+
+unset env_name
+
+alias pydoc python -m pydoc
+
+rehash
+
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.fish b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.fish
new file mode 100644
index 000000000..f3d1797a3
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.fish
@@ -0,0 +1,76 @@
+# This file must be sourced with `. bin/activate.fish` *from within a running fish ( http://fishshell.com ) session*.
+# Do not run it directly.
+
+function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
+ set -l fish_function_path
+
+ # Erase virtualenv's `fish_prompt` and restore the original.
+ functions -e fish_prompt
+ functions -c _old_fish_prompt fish_prompt
+ functions -e _old_fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ end
+
+ set -e VIRTUAL_ENV
+
+ if test "$argv[1]" != 'nondestructive'
+ # Self-destruct!
+ functions -e pydoc
+ functions -e deactivate
+ end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "__VIRTUAL_ENV__"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/__BIN_NAME__" $PATH
+
+# Unset `$PYTHONHOME` if set.
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+function pydoc
+ python -m pydoc $argv
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # Copy the current `fish_prompt` function as `_old_fish_prompt`.
+ functions -c fish_prompt _old_fish_prompt
+
+ function fish_prompt
+ # Save the current $status, for fish_prompts that display it.
+ set -l old_status $status
+
+ # Prompt override provided?
+ # If not, just prepend the environment name.
+ if test -n "__VIRTUAL_PROMPT__"
+ printf '%s%s' "__VIRTUAL_PROMPT__" (set_color normal)
+ else
+ printf '%s(%s%s%s) ' (set_color normal) (set_color -o white) (basename "$VIRTUAL_ENV") (set_color normal)
+ end
+
+ # Restore the original $status
+ echo "exit $old_status" | source
+ _old_fish_prompt
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.ps1 b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.ps1
new file mode 100644
index 000000000..0f4adf19f
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.ps1
@@ -0,0 +1,150 @@
+# This file must be dot-sourced from PowerShell; you cannot run it
+# directly. Do this: . ./activate.ps1
+
+# FIXME: clean up unused vars.
+$script:THIS_PATH = $myinvocation.mycommand.path
+$script:BASE_DIR = split-path (resolve-path "$THIS_PATH/..") -Parent
+$script:DIR_NAME = split-path $BASE_DIR -Leaf
+
+function global:deactivate ( [switch] $NonDestructive ){
+
+ if ( test-path variable:_OLD_VIRTUAL_PATH ) {
+ $env:PATH = $variable:_OLD_VIRTUAL_PATH
+ remove-variable "_OLD_VIRTUAL_PATH" -scope global
+ }
+
+ if ( test-path function:_old_virtual_prompt ) {
+ $function:prompt = $function:_old_virtual_prompt
+ remove-item function:\_old_virtual_prompt
+ }
+
+ if ($env:VIRTUAL_ENV) {
+ $old_env = split-path $env:VIRTUAL_ENV -leaf
+ remove-item env:VIRTUAL_ENV -erroraction silentlycontinue
+ }
+
+ if ( !$NonDestructive ) {
+ # Self destruct!
+ remove-item function:deactivate
+ }
+}
+
+# unset irrelevant variables
+deactivate -nondestructive
+
+$VIRTUAL_ENV = $BASE_DIR
+$env:VIRTUAL_ENV = $VIRTUAL_ENV
+
+$global:_OLD_VIRTUAL_PATH = $env:PATH
+$env:PATH = "$env:VIRTUAL_ENV/Scripts;" + $env:PATH
+if (! $env:VIRTUAL_ENV_DISABLE_PROMPT) {
+ function global:_old_virtual_prompt { "" }
+ $function:_old_virtual_prompt = $function:prompt
+ function global:prompt {
+ # Add a prefix to the current prompt, but don't discard it.
+ write-host "($(split-path $env:VIRTUAL_ENV -leaf)) " -nonewline
+ & $function:_old_virtual_prompt
+ }
+}
+
+# SIG # Begin signature block
+# MIISeAYJKoZIhvcNAQcCoIISaTCCEmUCAQExCzAJBgUrDgMCGgUAMGkGCisGAQQB
+# gjcCAQSgWzBZMDQGCisGAQQBgjcCAR4wJgIDAQAABBAfzDtgWUsITrck0sYpfvNR
+# AgEAAgEAAgEAAgEAAgEAMCEwCQYFKw4DAhoFAAQUS5reBwSg3zOUwhXf2jPChZzf
+# yPmggg6tMIIGcDCCBFigAwIBAgIBJDANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQG
+# EwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERp
+# Z2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2Vy
+# dGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDcxMDI0MjIwMTQ2WhcNMTcxMDI0MjIw
+# MTQ2WjCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0ZC4xKzAp
+# BgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxODA2BgNV
+# BAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUgT2JqZWN0
+# IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyiOLIjUemqAbPJ1J
+# 0D8MlzgWKbr4fYlbRVjvhHDtfhFN6RQxq0PjTQxRgWzwFQNKJCdU5ftKoM5N4YSj
+# Id6ZNavcSa6/McVnhDAQm+8H3HWoD030NVOxbjgD/Ih3HaV3/z9159nnvyxQEckR
+# ZfpJB2Kfk6aHqW3JnSvRe+XVZSufDVCe/vtxGSEwKCaNrsLc9pboUoYIC3oyzWoU
+# TZ65+c0H4paR8c8eK/mC914mBo6N0dQ512/bkSdaeY9YaQpGtW/h/W/FkbQRT3sC
+# pttLVlIjnkuY4r9+zvqhToPjxcfDYEf+XD8VGkAqle8Aa8hQ+M1qGdQjAye8OzbV
+# uUOw7wIDAQABo4IB6TCCAeUwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+# AQYwHQYDVR0OBBYEFNBOD0CZbLhLGW87KLjg44gHNKq3MB8GA1UdIwQYMBaAFE4L
+# 7xqkQFulF2mHMMo0aEPQQa7yMD0GCCsGAQUFBwEBBDEwLzAtBggrBgEFBQcwAoYh
+# aHR0cDovL3d3dy5zdGFydHNzbC5jb20vc2ZzY2EuY3J0MFsGA1UdHwRUMFIwJ6Al
+# oCOGIWh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3Nmc2NhLmNybDAnoCWgI4YhaHR0
+# cDovL2NybC5zdGFydHNzbC5jb20vc2ZzY2EuY3JsMIGABgNVHSAEeTB3MHUGCysG
+# AQQBgbU3AQIBMGYwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29t
+# L3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29t
+# L2ludGVybWVkaWF0ZS5wZGYwEQYJYIZIAYb4QgEBBAQDAgABMFAGCWCGSAGG+EIB
+# DQRDFkFTdGFydENvbSBDbGFzcyAyIFByaW1hcnkgSW50ZXJtZWRpYXRlIE9iamVj
+# dCBTaWduaW5nIENlcnRpZmljYXRlczANBgkqhkiG9w0BAQUFAAOCAgEAcnMLA3Va
+# N4OIE9l4QT5OEtZy5PByBit3oHiqQpgVEQo7DHRsjXD5H/IyTivpMikaaeRxIv95
+# baRd4hoUcMwDj4JIjC3WA9FoNFV31SMljEZa66G8RQECdMSSufgfDYu1XQ+cUKxh
+# D3EtLGGcFGjjML7EQv2Iol741rEsycXwIXcryxeiMbU2TPi7X3elbwQMc4JFlJ4B
+# y9FhBzuZB1DV2sN2irGVbC3G/1+S2doPDjL1CaElwRa/T0qkq2vvPxUgryAoCppU
+# FKViw5yoGYC+z1GaesWWiP1eFKAL0wI7IgSvLzU3y1Vp7vsYaxOVBqZtebFTWRHt
+# XjCsFrrQBngt0d33QbQRI5mwgzEp7XJ9xu5d6RVWM4TPRUsd+DDZpBHm9mszvi9g
+# VFb2ZG7qRRXCSqys4+u/NLBPbXi/m/lU00cODQTlC/euwjk9HQtRrXQ/zqsBJS6U
+# J+eLGw1qOfj+HVBl/ZQpfoLk7IoWlRQvRL1s7oirEaqPZUIWY/grXq9r6jDKAp3L
+# ZdKQpPOnnogtqlU4f7/kLjEJhrrc98mrOWmVMK/BuFRAfQ5oDUMnVmCzAzLMjKfG
+# cVW/iMew41yfhgKbwpfzm3LBr1Zv+pEBgcgW6onRLSAn3XHM0eNtz+AkxH6rRf6B
+# 2mYhLEEGLapH8R1AMAo4BbVFOZR5kXcMCwowggg1MIIHHaADAgECAgIEuDANBgkq
+# hkiG9w0BAQUFADCBjDELMAkGA1UEBhMCSUwxFjAUBgNVBAoTDVN0YXJ0Q29tIEx0
+# ZC4xKzApBgNVBAsTIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcx
+# ODA2BgNVBAMTL1N0YXJ0Q29tIENsYXNzIDIgUHJpbWFyeSBJbnRlcm1lZGlhdGUg
+# T2JqZWN0IENBMB4XDTExMTIwMzE1MzQxOVoXDTEzMTIwMzE0NTgwN1owgYwxIDAe
+# BgNVBA0TFzU4MTc5Ni1HaDd4Zkp4a3hRU0lPNEUwMQswCQYDVQQGEwJERTEPMA0G
+# A1UECBMGQmVybGluMQ8wDQYDVQQHEwZCZXJsaW4xFjAUBgNVBAMTDUphbm5pcyBM
+# ZWlkZWwxITAfBgkqhkiG9w0BCQEWEmphbm5pc0BsZWlkZWwuaW5mbzCCAiIwDQYJ
+# KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMcPeABYdN7nPq/AkZ/EkyUBGx/l2Yui
+# Lfm8ZdLG0ulMb/kQL3fRY7sUjYPyn9S6PhqqlFnNoGHJvbbReCdUC9SIQYmOEjEA
+# raHfb7MZU10NjO4U2DdGucj2zuO5tYxKizizOJF0e4yRQZVxpUGdvkW/+GLjCNK5
+# L7mIv3Z1dagxDKHYZT74HXiS4VFUwHF1k36CwfM2vsetdm46bdgSwV+BCMmZICYT
+# IJAS9UQHD7kP4rik3bFWjUx08NtYYFAVOd/HwBnemUmJe4j3IhZHr0k1+eDG8hDH
+# KVvPgLJIoEjC4iMFk5GWsg5z2ngk0LLu3JZMtckHsnnmBPHQK8a3opUNd8hdMNJx
+# gOwKjQt2JZSGUdIEFCKVDqj0FmdnDMPfwy+FNRtpBMl1sz78dUFhSrnM0D8NXrqa
+# 4rG+2FoOXlmm1rb6AFtpjAKksHRpYcPk2DPGWp/1sWB+dUQkS3gOmwFzyqeTuXpT
+# 0juqd3iAxOGx1VRFQ1VHLLf3AzV4wljBau26I+tu7iXxesVucSdsdQu293jwc2kN
+# xK2JyHCoZH+RyytrwS0qw8t7rMOukU9gwP8mn3X6mgWlVUODMcHTULjSiCEtvyZ/
+# aafcwjUbt4ReEcnmuZtWIha86MTCX7U7e+cnpWG4sIHPnvVTaz9rm8RyBkIxtFCB
+# nQ3FnoQgyxeJAgMBAAGjggOdMIIDmTAJBgNVHRMEAjAAMA4GA1UdDwEB/wQEAwIH
+# gDAuBgNVHSUBAf8EJDAiBggrBgEFBQcDAwYKKwYBBAGCNwIBFQYKKwYBBAGCNwoD
+# DTAdBgNVHQ4EFgQUWyCgrIWo8Ifvvm1/YTQIeMU9nc8wHwYDVR0jBBgwFoAU0E4P
+# QJlsuEsZbzsouODjiAc0qrcwggIhBgNVHSAEggIYMIICFDCCAhAGCysGAQQBgbU3
+# AQICMIIB/zAuBggrBgEFBQcCARYiaHR0cDovL3d3dy5zdGFydHNzbC5jb20vcG9s
+# aWN5LnBkZjA0BggrBgEFBQcCARYoaHR0cDovL3d3dy5zdGFydHNzbC5jb20vaW50
+# ZXJtZWRpYXRlLnBkZjCB9wYIKwYBBQUHAgIwgeowJxYgU3RhcnRDb20gQ2VydGlm
+# aWNhdGlvbiBBdXRob3JpdHkwAwIBARqBvlRoaXMgY2VydGlmaWNhdGUgd2FzIGlz
+# c3VlZCBhY2NvcmRpbmcgdG8gdGhlIENsYXNzIDIgVmFsaWRhdGlvbiByZXF1aXJl
+# bWVudHMgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeSwgcmVsaWFuY2Ugb25seSBm
+# b3IgdGhlIGludGVuZGVkIHB1cnBvc2UgaW4gY29tcGxpYW5jZSBvZiB0aGUgcmVs
+# eWluZyBwYXJ0eSBvYmxpZ2F0aW9ucy4wgZwGCCsGAQUFBwICMIGPMCcWIFN0YXJ0
+# Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MAMCAQIaZExpYWJpbGl0eSBhbmQg
+# d2FycmFudGllcyBhcmUgbGltaXRlZCEgU2VlIHNlY3Rpb24gIkxlZ2FsIGFuZCBM
+# aW1pdGF0aW9ucyIgb2YgdGhlIFN0YXJ0Q29tIENBIHBvbGljeS4wNgYDVR0fBC8w
+# LTAroCmgJ4YlaHR0cDovL2NybC5zdGFydHNzbC5jb20vY3J0YzItY3JsLmNybDCB
+# iQYIKwYBBQUHAQEEfTB7MDcGCCsGAQUFBzABhitodHRwOi8vb2NzcC5zdGFydHNz
+# bC5jb20vc3ViL2NsYXNzMi9jb2RlL2NhMEAGCCsGAQUFBzAChjRodHRwOi8vYWlh
+# LnN0YXJ0c3NsLmNvbS9jZXJ0cy9zdWIuY2xhc3MyLmNvZGUuY2EuY3J0MCMGA1Ud
+# EgQcMBqGGGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tLzANBgkqhkiG9w0BAQUFAAOC
+# AQEAhrzEV6zwoEtKjnFRhCsjwiPykVpo5Eiye77Ve801rQDiRKgSCCiW6g3HqedL
+# OtaSs65Sj2pm3Viea4KR0TECLcbCTgsdaHqw2x1yXwWBQWZEaV6EB05lIwfr94P1
+# SFpV43zkuc+bbmA3+CRK45LOcCNH5Tqq7VGTCAK5iM7tvHwFlbQRl+I6VEL2mjpF
+# NsuRjDOVrv/9qw/a22YJ9R7Y1D0vUSs3IqZx2KMUaYDP7H2mSRxJO2nADQZBtriF
+# gTyfD3lYV12MlIi5CQwe3QC6DrrfSMP33i5Wa/OFJiQ27WPxmScYVhiqozpImFT4
+# PU9goiBv9RKXdgTmZE1PN0NQ5jGCAzUwggMxAgEBMIGTMIGMMQswCQYDVQQGEwJJ
+# TDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0
+# YWwgQ2VydGlmaWNhdGUgU2lnbmluZzE4MDYGA1UEAxMvU3RhcnRDb20gQ2xhc3Mg
+# MiBQcmltYXJ5IEludGVybWVkaWF0ZSBPYmplY3QgQ0ECAgS4MAkGBSsOAwIaBQCg
+# eDAYBgorBgEEAYI3AgEMMQowCKACgAChAoAAMBkGCSqGSIb3DQEJAzEMBgorBgEE
+# AYI3AgEEMBwGCisGAQQBgjcCAQsxDjAMBgorBgEEAYI3AgEVMCMGCSqGSIb3DQEJ
+# BDEWBBRVGw0FDSiaIi38dWteRUAg/9Pr6DANBgkqhkiG9w0BAQEFAASCAgCInvOZ
+# FdaNFzbf6trmFDZKMojyx3UjKMCqNjHVBbuKY0qXwFC/ElYDV1ShJ2CBZbdurydO
+# OQ6cIQ0KREOCwmX/xB49IlLHHUxNhEkVv7HGU3EKAFf9IBt9Yr7jikiR9cjIsfHK
+# 4cjkoKJL7g28yEpLLkHt1eo37f1Ga9lDWEa5Zq3U5yX+IwXhrUBm1h8Xr033FhTR
+# VEpuSz6LHtbrL/zgJnCzJ2ahjtJoYevdcWiNXffosJHFaSfYDDbiNsPRDH/1avmb
+# 5j/7BhP8BcBaR6Fp8tFbNGIcWHHGcjqLMnTc4w13b7b4pDhypqElBa4+lCmwdvv9
+# GydYtRgPz8GHeoBoKj30YBlMzRIfFYaIFGIC4Ai3UEXkuH9TxYohVbGm/W0Kl4Lb
+# RJ1FwiVcLcTOJdgNId2vQvKc+jtNrjcg5SP9h2v/C4aTx8tyc6tE3TOPh2f9b8DL
+# S+SbVArJpuJqrPTxDDoO1QNjTgLcdVYeZDE+r/NjaGZ6cMSd8db3EaG3ijD/0bud
+# SItbm/OlNVbQOFRR76D+ZNgPcU5iNZ3bmvQQIg6aSB9MHUpIE/SeCkNl9YeVk1/1
+# GFULgNMRmIYP4KLvu9ylh5Gu3hvD5VNhH6+FlXANwFy07uXks5uF8mfZVxVCnodG
+# xkNCx+6PsrA5Z7WP4pXcmYnMn97npP/Q9EHJWw==
+# SIG # End signature block
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.sh b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.sh
new file mode 100644
index 000000000..477b7eca2
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate.sh
@@ -0,0 +1,78 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+ unset -f pydoc >/dev/null 2>&1
+
+ # reset old environment variables
+ # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
+ if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
+ PATH="$_OLD_VIRTUAL_PATH"
+ export PATH
+ unset _OLD_VIRTUAL_PATH
+ fi
+ if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
+ PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+ export PYTHONHOME
+ unset _OLD_VIRTUAL_PYTHONHOME
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+ # past commands the $PATH changes we made may not be respected
+ if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
+ hash -r 2>/dev/null
+ fi
+
+ if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
+ PS1="$_OLD_VIRTUAL_PS1"
+ export PS1
+ unset _OLD_VIRTUAL_PS1
+ fi
+
+ unset VIRTUAL_ENV
+ if [ ! "${1-}" = "nondestructive" ] ; then
+ # Self destruct!
+ unset -f deactivate
+ fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="__VIRTUAL_ENV__"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/__BIN_NAME__:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+if ! [ -z "${PYTHONHOME+_}" ] ; then
+ _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+ unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
+ _OLD_VIRTUAL_PS1="$PS1"
+ if [ "x__VIRTUAL_PROMPT__" != x ] ; then
+ PS1="__VIRTUAL_PROMPT__$PS1"
+ else
+ PS1="(`basename \"$VIRTUAL_ENV\"`) $PS1"
+ fi
+ export PS1
+fi
+
+# Make sure to unalias pydoc if it's already there
+alias pydoc 2>/dev/null >/dev/null && unalias pydoc
+
+pydoc () {
+ python -m pydoc "$@"
+}
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
+ hash -r 2>/dev/null
+fi
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate_this.py b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate_this.py
new file mode 100644
index 000000000..f18193bf8
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/activate_this.py
@@ -0,0 +1,34 @@
+"""By using execfile(this_file, dict(__file__=this_file)) you will
+activate this virtualenv environment.
+
+This can be used when you must use an existing Python interpreter, not
+the virtualenv's bin/python.
+"""
+
+try:
+ __file__
+except NameError:
+ raise AssertionError(
+ "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
+import sys
+import os
+
+old_os_path = os.environ.get('PATH', '')
+os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
+base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+if sys.platform == 'win32':
+ site_packages = os.path.join(base, 'Lib', 'site-packages')
+else:
+ site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
+prev_sys_path = list(sys.path)
+import site
+site.addsitedir(site_packages)
+sys.real_prefix = sys.prefix
+sys.prefix = base
+# Move the added items to the front of the path:
+new_sys_path = []
+for item in list(sys.path):
+ if item not in prev_sys_path:
+ new_sys_path.append(item)
+ sys.path.remove(item)
+sys.path[:0] = new_sys_path
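+
+# A minimal sketch of the intended call site (hypothetical path; this function
+# is illustrative only and is never invoked from this file):
+def _example_activate(activate_this):
+    # e.g. activate_this = '/path/to/venv/bin/activate_this.py'
+    # Works on Python 2 and 3; execfile() is the Python-2-only equivalent.
+    exec(open(activate_this).read(), dict(__file__=activate_this))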
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/deactivate.bat b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/deactivate.bat
new file mode 100644
index 000000000..9228d3171
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/deactivate.bat
@@ -0,0 +1,19 @@
+@echo off
+
+set VIRTUAL_ENV=
+
+REM Don't use () to avoid problems with them in %PATH%
+if not defined _OLD_VIRTUAL_PROMPT goto ENDIFVPROMPT
+ set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
+ set _OLD_VIRTUAL_PROMPT=
+:ENDIFVPROMPT
+
+if not defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
+ set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
+ set _OLD_VIRTUAL_PYTHONHOME=
+:ENDIFVHOME
+
+if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH
+ set "PATH=%_OLD_VIRTUAL_PATH%"
+ set _OLD_VIRTUAL_PATH=
+:ENDIFVPATH \ No newline at end of file
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils-init.py b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils-init.py
new file mode 100644
index 000000000..29fc1da45
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils-init.py
@@ -0,0 +1,101 @@
+import os
+import sys
+import warnings
+import imp
+import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
+ # Important! To work on pypy, this must be a module that resides in the
+ # lib-python/modified-x.y.z directory
+
+dirname = os.path.dirname
+
+distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
+if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
+ warnings.warn(
+ "The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
+else:
+ __path__.insert(0, distutils_path)
+ real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ('', '', imp.PKG_DIRECTORY))
+ # Copy the relevant attributes
+ try:
+ __revision__ = real_distutils.__revision__
+ except AttributeError:
+ pass
+ __version__ = real_distutils.__version__
+
+from distutils import dist, sysconfig
+
+try:
+ basestring
+except NameError:
+ basestring = str
+
+## patch build_ext (distutils doesn't know how to get the libs directory
+## path on windows - it hardcodes the paths around the patched sys.prefix)
+
+if sys.platform == 'win32':
+ from distutils.command.build_ext import build_ext as old_build_ext
+ class build_ext(old_build_ext):
+ def finalize_options (self):
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, basestring):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
+ old_build_ext.finalize_options(self)
+
+ from distutils.command import build_ext as build_ext_module
+ build_ext_module.build_ext = build_ext
+
+## distutils.dist patches:
+
+old_find_config_files = dist.Distribution.find_config_files
+def find_config_files(self):
+ found = old_find_config_files(self)
+ system_distutils = os.path.join(distutils_path, 'distutils.cfg')
+ #if os.path.exists(system_distutils):
+ # found.insert(0, system_distutils)
+ # What to call the per-user config file
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+ user_filename = os.path.join(sys.prefix, user_filename)
+ if os.path.isfile(user_filename):
+ for item in list(found):
+ if item.endswith('pydistutils.cfg'):
+ found.remove(item)
+ found.append(user_filename)
+ return found
+dist.Distribution.find_config_files = find_config_files
+
+## distutils.sysconfig patches:
+
+old_get_python_inc = sysconfig.get_python_inc
+def sysconfig_get_python_inc(plat_specific=0, prefix=None):
+ if prefix is None:
+ prefix = sys.real_prefix
+ return old_get_python_inc(plat_specific, prefix)
+sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
+sysconfig.get_python_inc = sysconfig_get_python_inc
+
+old_get_python_lib = sysconfig.get_python_lib
+def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+ if standard_lib and prefix is None:
+ prefix = sys.real_prefix
+ return old_get_python_lib(plat_specific, standard_lib, prefix)
+sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
+sysconfig.get_python_lib = sysconfig_get_python_lib
+
+old_get_config_vars = sysconfig.get_config_vars
+def sysconfig_get_config_vars(*args):
+ real_vars = old_get_config_vars(*args)
+ if sys.platform == 'win32':
+ lib_dir = os.path.join(sys.real_prefix, "libs")
+ if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
+ real_vars['LIBDIR'] = lib_dir # asked for all
+ elif isinstance(real_vars, list) and 'LIBDIR' in args:
+ real_vars = real_vars + [lib_dir] # asked for list
+ return real_vars
+sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
+sysconfig.get_config_vars = sysconfig_get_config_vars
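+
+# A minimal sketch (illustrative only, never called) of what the patches above
+# change, assuming this runs inside a virtualenv where sys.real_prefix is set:
+def _demo_patched_sysconfig():
+    inc = sysconfig.get_python_inc()  # now resolves under sys.real_prefix
+    lib = sysconfig.get_python_lib()  # still resolves under the virtualenv
+    return inc, lib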
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils.cfg b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils.cfg
new file mode 100644
index 000000000..1af230ec9
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/distutils.cfg
@@ -0,0 +1,6 @@
+# This is a config file local to this virtualenv installation
+# You may include options that will be used by all distutils commands,
+# and by easy_install. For instance:
+#
+# [easy_install]
+# find_links = http://mylocalsite
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/python-config b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/python-config
new file mode 100644
index 000000000..5e7a7c901
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/python-config
@@ -0,0 +1,78 @@
+#!__VIRTUAL_ENV__/__BIN_NAME__/python
+
+import sys
+import getopt
+import sysconfig
+
+valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
+ 'ldflags', 'help']
+
+if sys.version_info >= (3, 2):
+ valid_opts.insert(-1, 'extension-suffix')
+ valid_opts.append('abiflags')
+if sys.version_info >= (3, 3):
+ valid_opts.append('configdir')
+
+
+def exit_with_usage(code=1):
+ sys.stderr.write("Usage: {0} [{1}]\n".format(
+ sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
+ sys.exit(code)
+
+try:
+ opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
+except getopt.error:
+ exit_with_usage()
+
+if not opts:
+ exit_with_usage()
+
+pyver = sysconfig.get_config_var('VERSION')
+getvar = sysconfig.get_config_var
+
+opt_flags = [flag for (flag, val) in opts]
+
+if '--help' in opt_flags:
+ exit_with_usage(code=0)
+
+for opt in opt_flags:
+ if opt == '--prefix':
+ print(sysconfig.get_config_var('prefix'))
+
+ elif opt == '--exec-prefix':
+ print(sysconfig.get_config_var('exec_prefix'))
+
+ elif opt in ('--includes', '--cflags'):
+ flags = ['-I' + sysconfig.get_path('include'),
+ '-I' + sysconfig.get_path('platinclude')]
+ if opt == '--cflags':
+ flags.extend(getvar('CFLAGS').split())
+ print(' '.join(flags))
+
+ elif opt in ('--libs', '--ldflags'):
+ abiflags = getattr(sys, 'abiflags', '')
+ libs = ['-lpython' + pyver + abiflags]
+ libs += getvar('LIBS').split()
+ libs += getvar('SYSLIBS').split()
+ # add the prefix/lib/pythonX.Y/config dir, but only if there is no
+ # shared library in prefix/lib/.
+ if opt == '--ldflags':
+ if not getvar('Py_ENABLE_SHARED'):
+ libs.insert(0, '-L' + getvar('LIBPL'))
+ if not getvar('PYTHONFRAMEWORK'):
+ libs.extend(getvar('LINKFORSHARED').split())
+ print(' '.join(libs))
+
+ elif opt == '--extension-suffix':
+ ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+ if ext_suffix is None:
+ ext_suffix = sysconfig.get_config_var('SO')
+ print(ext_suffix)
+
+ elif opt == '--abiflags':
+ if not getattr(sys, 'abiflags', None):
+ exit_with_usage()
+ print(sys.abiflags)
+
+ elif opt == '--configdir':
+ print(sysconfig.get_config_var('LIBPL'))
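+
+# A condensed sketch (illustrative only, never invoked) of what --includes and
+# --libs compute above, using the same sysconfig helpers:
+def _demo_flags():
+    includes = ['-I' + sysconfig.get_path('include'),
+                '-I' + sysconfig.get_path('platinclude')]
+    libs = ['-lpython' + pyver + getattr(sys, 'abiflags', '')]
+    libs += getvar('LIBS').split() + getvar('SYSLIBS').split()
+    return includes, libs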
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/site.py b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/site.py
new file mode 100644
index 000000000..7969769c3
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_embedded/site.py
@@ -0,0 +1,758 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code. Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path. On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages. On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely). The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path. Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once. Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth. Assume foo.pth contains the
+following:
+
+ # foo package configuration
+ foo
+ bar
+ bletch
+
+and bar.pth contains:
+
+ # bar package configuration
+ bar
+
+Then the following directories are added to sys.path, in this order:
+
+ /usr/local/lib/python2.X/site-packages/bar
+ /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations. If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+try:
+ import __builtin__ as builtins
+except ImportError:
+ import builtins
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
+_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+ ModuleType = type(os)
+
+def makepath(*paths):
+ dir = os.path.join(*paths)
+ if _is_jython and (dir == '__classpath__' or
+ dir.startswith('__pyclasspath__')):
+ return dir, dir
+ dir = os.path.abspath(dir)
+ return dir, os.path.normcase(dir)
+
+def abs__file__():
+ """Set all module' __file__ attribute to an absolute path"""
+ for m in sys.modules.values():
+ if ((_is_jython and not isinstance(m, ModuleType)) or
+ hasattr(m, '__loader__')):
+ # only modules need the abspath in Jython. and don't mess
+ # with a PEP 302-supplied __file__
+ continue
+ f = getattr(m, '__file__', None)
+ if f is None:
+ continue
+ m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+ """ Remove duplicate entries from sys.path along with making them
+ absolute"""
+ # This ensures that the initial path provided by the interpreter contains
+ # only absolute pathnames, even if we're running from the build directory.
+ L = []
+ known_paths = set()
+ for dir in sys.path:
+ # Filter out duplicate paths (on case-insensitive file systems also
+ # if they only differ in case); turn relative paths into absolute
+ # paths.
+ dir, dircase = makepath(dir)
+ if not dircase in known_paths:
+ L.append(dir)
+ known_paths.add(dircase)
+ sys.path[:] = L
+ return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python. See http://www.python.org/sf/586680
+def addbuilddir():
+ """Append ./build/lib.<platform> in case we're running in the build dir
+ (especially for Guido :-)"""
+ from distutils.util import get_platform
+ s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+ if hasattr(sys, 'gettotalrefcount'):
+ s += '-pydebug'
+ s = os.path.join(os.path.dirname(sys.path[-1]), s)
+ sys.path.append(s)
+
+def _init_pathinfo():
+ """Return a set containing all existing directory entries from sys.path"""
+ d = set()
+ for dir in sys.path:
+ try:
+ if os.path.isdir(dir):
+ dir, dircase = makepath(dir)
+ d.add(dircase)
+ except TypeError:
+ continue
+ return d
+
+def addpackage(sitedir, name, known_paths):
+ """Add a new path to known_paths by combining sitedir and 'name' or execute
+ sitedir if it starts with 'import'"""
+ if known_paths is None:
+ _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ fullname = os.path.join(sitedir, name)
+ try:
+ f = open(fullname, "rU")
+ except IOError:
+ return
+ try:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ if line.startswith("import"):
+ exec(line)
+ continue
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if not dircase in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ finally:
+ f.close()
+ if reset:
+ known_paths = None
+ return known_paths
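+
+# A minimal sketch (illustrative only, never called; paths are created in a
+# temporary directory) of the .pth handling implemented by addpackage above:
+def _demo_pth():
+    import tempfile
+    sitedir = tempfile.mkdtemp()
+    os.mkdir(os.path.join(sitedir, 'extras'))
+    f = open(os.path.join(sitedir, 'demo.pth'), 'w')
+    f.write('# comments are skipped\n'
+            'extras\n'
+            'import sys; sys._demo_pth_ran = True\n')
+    f.close()
+    addsitedir(sitedir)  # appends <sitedir> and <sitedir>/extras to sys.path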
+
+def addsitedir(sitedir, known_paths=None):
+ """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+ 'sitedir'"""
+ if known_paths is None:
+ known_paths = _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ sitedir, sitedircase = makepath(sitedir)
+ if not sitedircase in known_paths:
+ sys.path.append(sitedir) # Add path component
+ try:
+ names = os.listdir(sitedir)
+ except os.error:
+ return
+ names.sort()
+ for name in names:
+ if name.endswith(os.extsep + "pth"):
+ addpackage(sitedir, name, known_paths)
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+ """Add site-packages (and possibly site-python) to sys.path"""
+ prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+ if exec_prefix != sys_prefix:
+ prefixes.append(os.path.join(exec_prefix, "local"))
+
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos') or _is_jython:
+ sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+ elif _is_pypy:
+ sitedirs = [os.path.join(prefix, 'site-packages')]
+ elif sys.platform == 'darwin' and prefix == sys_prefix:
+
+ if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
+
+ sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+ os.path.join(prefix, "Extras", "lib", "python")]
+
+ else: # any other Python distros on OSX work this way
+ sitedirs = [os.path.join(prefix, "lib",
+ "python" + sys.version[:3], "site-packages")]
+
+ elif os.sep == '/':
+ sitedirs = [os.path.join(prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages"),
+ os.path.join(prefix, "lib", "site-python"),
+ os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+ lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+ if (os.path.exists(lib64_dir) and
+ os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+ if _is_64bit:
+ sitedirs.insert(0, lib64_dir)
+ else:
+ sitedirs.append(lib64_dir)
+ try:
+ # sys.getobjects only available in --with-pydebug build
+ sys.getobjects
+ sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+ except AttributeError:
+ pass
+ # Debian-specific dist-packages directories:
+ sitedirs.append(os.path.join(prefix, "local/lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ if sys.version[0] == '2':
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ else:
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[0],
+ "dist-packages"))
+ sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+ else:
+ sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ sitedirs.append(
+ os.path.join(home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages'))
+ for sitedir in sitedirs:
+ if os.path.isdir(sitedir):
+ addsitedir(sitedir, known_paths)
+ return None
+
+def check_enableusersite():
+ """Check if user site directory is safe for inclusion
+
+ The function tests for the command line flag (including environment var),
+ process uid/gid equal to effective uid/gid.
+
+ None: Disabled for security reasons
+ False: Disabled by user (command line option)
+ True: Safe and enabled
+ """
+ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+ return False
+
+ if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+ # check process uid == effective uid
+ if os.geteuid() != os.getuid():
+ return None
+ if hasattr(os, "getgid") and hasattr(os, "getegid"):
+ # check process gid == effective gid
+ if os.getegid() != os.getgid():
+ return None
+
+ return True
+
+def addusersitepackages(known_paths):
+ """Add a per user site-package to sys.path
+
+ Each user has their own python directory with site-packages in their
+ home directory.
+
+ USER_BASE is the root directory for all Python versions
+
+ USER_SITE is the user specific site-packages directory
+
+ USER_SITE/.. can be used for data.
+ """
+ global USER_BASE, USER_SITE, ENABLE_USER_SITE
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ #if sys.platform in ('os2emx', 'riscos'):
+ # # Don't know what to put here
+ # USER_BASE = ''
+ # USER_SITE = ''
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser(base, "Python")
+ USER_SITE = os.path.join(USER_BASE,
+ "Python" + sys.version[0] + sys.version[2],
+ "site-packages")
+ else:
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser("~", ".local")
+ USER_SITE = os.path.join(USER_BASE, "lib",
+ "python" + sys.version[:3],
+ "site-packages")
+
+ if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+ addsitedir(USER_SITE, known_paths)
+ if ENABLE_USER_SITE:
+ for dist_libdir in ("lib", "local/lib"):
+ user_site = os.path.join(USER_BASE, dist_libdir,
+ "python" + sys.version[:3],
+ "dist-packages")
+ if os.path.isdir(user_site):
+ addsitedir(user_site, known_paths)
+ return known_paths
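+
+# A minimal sketch (illustrative only, never called) mirroring the per-user
+# site-packages locations computed above; version segments come from
+# sys.version exactly as in the real code:
+def _demo_user_site_layout():
+    if os.name == 'nt':
+        base = os.environ.get('APPDATA') or '~'
+        return os.path.expanduser(os.path.join(
+            base, 'Python',
+            'Python' + sys.version[0] + sys.version[2], 'site-packages'))
+    return os.path.expanduser(os.path.join(
+        '~', '.local', 'lib', 'python' + sys.version[:3], 'site-packages'))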
+
+
+
+def setBEGINLIBPATH():
+ """The OS/2 EMX port has optional extension modules that do double duty
+ as DLLs (and must use the .DLL file extension) for other extensions.
+ The library search path needs to be amended so these will be found
+ during module import. Use BEGINLIBPATH so that these are at the start
+ of the library search path.
+
+ """
+ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+ libpath = os.environ['BEGINLIBPATH'].split(';')
+ if libpath[-1]:
+ libpath.append(dllpath)
+ else:
+ libpath[-1] = dllpath
+ os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+ """Define new built-ins 'quit' and 'exit'.
+ These are simply strings that display a hint on how to exit.
+
+ """
+ if os.sep == ':':
+ eof = 'Cmd-Q'
+ elif os.sep == '\\':
+ eof = 'Ctrl-Z plus Return'
+ else:
+ eof = 'Ctrl-D (i.e. EOF)'
+
+ class Quitter(object):
+ def __init__(self, name):
+ self.name = name
+ def __repr__(self):
+ return 'Use %s() or %s to exit' % (self.name, eof)
+ def __call__(self, code=None):
+ # Shells like IDLE catch the SystemExit, but listen when their
+ # stdin wrapper is closed.
+ try:
+ sys.stdin.close()
+ except:
+ pass
+ raise SystemExit(code)
+ builtins.quit = Quitter('quit')
+ builtins.exit = Quitter('exit')
+
+
+class _Printer(object):
+ """interactive prompt objects for printing the license text, a list of
+ contributors and the copyright notice."""
+
+ MAXLINES = 23
+
+ def __init__(self, name, data, files=(), dirs=()):
+ self.__name = name
+ self.__data = data
+ self.__files = files
+ self.__dirs = dirs
+ self.__lines = None
+
+ def __setup(self):
+ if self.__lines:
+ return
+ data = None
+ for dir in self.__dirs:
+ for filename in self.__files:
+ filename = os.path.join(dir, filename)
+ try:
+ fp = open(filename, "rU")
+ data = fp.read()
+ fp.close()
+ break
+ except IOError:
+ pass
+ if data:
+ break
+ if not data:
+ data = self.__data
+ self.__lines = data.split('\n')
+ self.__linecnt = len(self.__lines)
+
+ def __repr__(self):
+ self.__setup()
+ if len(self.__lines) <= self.MAXLINES:
+ return "\n".join(self.__lines)
+ else:
+ return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+ def __call__(self):
+ self.__setup()
+ prompt = 'Hit Return for more, or q (and Return) to quit: '
+ lineno = 0
+ while 1:
+ try:
+ for i in range(lineno, lineno + self.MAXLINES):
+ print(self.__lines[i])
+ except IndexError:
+ break
+ else:
+ lineno += self.MAXLINES
+ key = None
+ while key is None:
+ try:
+ key = raw_input(prompt)
+ except NameError:
+ key = input(prompt)
+ if key not in ('', 'q'):
+ key = None
+ if key == 'q':
+ break
+
+def setcopyright():
+ """Set 'copyright' and 'credits' in __builtin__"""
+ builtins.copyright = _Printer("copyright", sys.copyright)
+ if _is_jython:
+ builtins.credits = _Printer(
+ "credits",
+ "Jython is maintained by the Jython developers (www.jython.org).")
+ elif _is_pypy:
+ builtins.credits = _Printer(
+ "credits",
+ "PyPy is maintained by the PyPy developers: http://pypy.org/")
+ else:
+ builtins.credits = _Printer("credits", """\
+ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+ for supporting Python development. See www.python.org for more information.""")
+ here = os.path.dirname(os.__file__)
+ builtins.license = _Printer(
+ "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+ ["LICENSE.txt", "LICENSE"],
+ [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+ """Define the built-in 'help'.
+ This is a wrapper around pydoc.help (with a twist).
+
+ """
+
+ def __repr__(self):
+ return "Type help() for interactive help, " \
+ "or help(object) for help about object."
+ def __call__(self, *args, **kwds):
+ import pydoc
+ return pydoc.help(*args, **kwds)
+
+def sethelper():
+ builtins.help = _Helper()
+
+def aliasmbcs():
+ """On Windows, some default encodings are not provided by Python,
+ while they are always available as "mbcs" in each locale. Make
+ them usable by aliasing to "mbcs" in such a case."""
+ if sys.platform == 'win32':
+ import locale, codecs
+ enc = locale.getdefaultlocale()[1]
+ if enc and enc.startswith('cp'): # "cp***" ? (enc may be None)
+ try:
+ codecs.lookup(enc)
+ except LookupError:
+ import encodings
+ encodings._cache[enc] = encodings._unknown
+ encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+ """Set the string encoding used by the Unicode implementation. The
+ default is 'ascii', but if you're willing to experiment, you can
+ change this."""
+ encoding = "ascii" # Default value set by _PyUnicode_Init()
+ if 0:
+ # Enable to support locale aware default string encodings.
+ import locale
+ loc = locale.getdefaultlocale()
+ if loc[1]:
+ encoding = loc[1]
+ if 0:
+ # Enable to switch off string to Unicode coercion and implicit
+ # Unicode to string conversion.
+ encoding = "undefined"
+ if encoding != "ascii":
+ # On Non-Unicode builds this will raise an AttributeError...
+ sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+ """Run custom site specific code, if available."""
+ try:
+ import sitecustomize
+ except ImportError:
+ pass
+
+def virtual_install_main_packages():
+ f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+ sys.real_prefix = f.read().strip()
+ f.close()
+ pos = 2
+ hardcoded_relative_dirs = []
+ if sys.path[0] == '':
+ pos += 1
+ if _is_jython:
+ paths = [os.path.join(sys.real_prefix, 'Lib')]
+ elif _is_pypy:
+ if sys.version_info > (3, 2):
+ cpyver = '%d' % sys.version_info[0]
+ elif sys.pypy_version_info >= (1, 5):
+ cpyver = '%d.%d' % sys.version_info[:2]
+ else:
+ cpyver = '%d.%d.%d' % sys.version_info[:3]
+ paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+ os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+ if sys.pypy_version_info < (1, 9):
+ paths.insert(1, os.path.join(sys.real_prefix,
+ 'lib-python', 'modified-%s' % cpyver))
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ #
+ # This is hardcoded in the Python executable, but relative to sys.prefix:
+ for path in paths[:]:
+ plat_path = os.path.join(path, 'plat-%s' % sys.platform)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ elif sys.platform == 'win32':
+ paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+ else:
+ paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+ hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+ lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+ if os.path.exists(lib64_path):
+ if _is_64bit:
+ paths.insert(0, lib64_path)
+ else:
+ paths.append(lib64_path)
+ # This is hardcoded in the Python executable, but relative to
+ # sys.prefix. Debian change: we need to add the multiarch triplet
+ # here, which is where the real stuff lives. As per PEP 421, in
+ # Python 3.3+, this lives in sys.implementation, while in Python 2.7
+ # it lives in sys.
+ try:
+ arch = getattr(sys, 'implementation', sys)._multiarch
+ except AttributeError:
+ # This is a non-multiarch aware Python. Fallback to the old way.
+ arch = sys.platform
+ plat_path = os.path.join(sys.real_prefix, 'lib',
+ 'python'+sys.version[:3],
+ 'plat-%s' % arch)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ # This is hardcoded in the Python executable, but
+ # relative to sys.prefix, so we have to fix up:
+ for path in list(paths):
+ tk_dir = os.path.join(path, 'lib-tk')
+ if os.path.exists(tk_dir):
+ paths.append(tk_dir)
+
+ # These are hardcoded in the Apple's Python executable,
+ # but relative to sys.prefix, so we have to fix them up:
+ if sys.platform == 'darwin':
+ hardcoded_paths = [os.path.join(relative_dir, module)
+ for relative_dir in hardcoded_relative_dirs
+ for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+ for path in hardcoded_paths:
+ if os.path.exists(path):
+ paths.append(path)
+
+ sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+ """
+ Force easy_installed eggs in the global environment to get placed
+ in sys.path after all packages inside the virtualenv. This
+ maintains the "least surprise" result that packages in the
+ virtualenv always mask global packages, never the other way
+ around.
+
+ """
+ egginsert = getattr(sys, '__egginsert', 0)
+ for i, path in enumerate(sys.path):
+ if i > egginsert and path.startswith(sys.prefix):
+ egginsert = i
+ sys.__egginsert = egginsert + 1
+
+def virtual_addsitepackages(known_paths):
+ force_global_eggs_after_local_site_packages()
+ return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+ """Adjust the special classpath sys.path entries for Jython. These
+ entries should follow the base virtualenv lib directories.
+ """
+ paths = []
+ classpaths = []
+ for path in sys.path:
+ if path == '__classpath__' or path.startswith('__pyclasspath__'):
+ classpaths.append(path)
+ else:
+ paths.append(path)
+ sys.path = paths
+ sys.path.extend(classpaths)
+
+def execusercustomize():
+ """Run custom user specific code, if available."""
+ try:
+ import usercustomize
+ except ImportError:
+ pass
+
+
+def main():
+ global ENABLE_USER_SITE
+ virtual_install_main_packages()
+ abs__file__()
+ paths_in_sys = removeduppaths()
+ if (os.name == "posix" and sys.path and
+ os.path.basename(sys.path[-1]) == "Modules"):
+ addbuilddir()
+ if _is_jython:
+ fixclasspath()
+ GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+ if not GLOBAL_SITE_PACKAGES:
+ ENABLE_USER_SITE = False
+ if ENABLE_USER_SITE is None:
+ ENABLE_USER_SITE = check_enableusersite()
+ paths_in_sys = addsitepackages(paths_in_sys)
+ paths_in_sys = addusersitepackages(paths_in_sys)
+ if GLOBAL_SITE_PACKAGES:
+ paths_in_sys = virtual_addsitepackages(paths_in_sys)
+ if sys.platform == 'os2emx':
+ setBEGINLIBPATH()
+ setquit()
+ setcopyright()
+ sethelper()
+ aliasmbcs()
+ setencoding()
+ execsitecustomize()
+ if ENABLE_USER_SITE:
+ execusercustomize()
+ # Remove sys.setdefaultencoding() so that users cannot change the
+ # encoding after initialization. The test for presence is needed when
+ # this module is run as a script, because this code is executed twice.
+ if hasattr(sys, "setdefaultencoding"):
+ del sys.setdefaultencoding
+
+main()
+
+def _script():
+ help = """\
+ %s [--user-base] [--user-site]
+
+ Without arguments print some useful information
+ With arguments print the value of USER_BASE and/or USER_SITE separated
+ by '%s'.
+
+ Exit codes with --user-base or --user-site:
+ 0 - user site directory is enabled
+ 1 - user site directory is disabled by user
+      2 - user site directory is disabled by super user
+ or for security reasons
+ >2 - unknown error
+ """
+ args = sys.argv[1:]
+ if not args:
+ print("sys.path = [")
+ for dir in sys.path:
+ print(" %r," % (dir,))
+ print("]")
+ def exists(path):
+ if os.path.isdir(path):
+ return "exists"
+ else:
+ return "doesn't exist"
+ print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
+        print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_SITE)))
+ print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)
+ sys.exit(0)
+
+ buffer = []
+ if '--user-base' in args:
+ buffer.append(USER_BASE)
+ if '--user-site' in args:
+ buffer.append(USER_SITE)
+
+ if buffer:
+ print(os.pathsep.join(buffer))
+ if ENABLE_USER_SITE:
+ sys.exit(0)
+ elif ENABLE_USER_SITE is False:
+ sys.exit(1)
+ elif ENABLE_USER_SITE is None:
+ sys.exit(2)
+ else:
+ sys.exit(3)
+ else:
+ import textwrap
+ print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+ sys.exit(10)
+
+if __name__ == '__main__':
+ _script()
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_support/__init__.py b/testing/mozharness/external_tools/virtualenv/virtualenv_support/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_support/__init__.py
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl b/testing/mozharness/external_tools/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..dfef51d44
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl
Binary files differ
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl b/testing/mozharness/external_tools/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl
new file mode 100644
index 000000000..cc49227a0
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl
Binary files differ
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl b/testing/mozharness/external_tools/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..02c8ce873
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl
Binary files differ
diff --git a/testing/mozharness/external_tools/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl b/testing/mozharness/external_tools/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..506d5e520
--- /dev/null
+++ b/testing/mozharness/external_tools/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl
Binary files differ
diff --git a/testing/mozharness/mach_commands.py b/testing/mozharness/mach_commands.py
new file mode 100644
index 000000000..f453397db
--- /dev/null
+++ b/testing/mozharness/mach_commands.py
@@ -0,0 +1,196 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import urllib
+import urlparse
+
+import mozinfo
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+from mozbuild.base import MachCommandBase, MozbuildObject
+from mozbuild.base import MachCommandConditions as conditions
+
+def get_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("suite_name", nargs=1, type=str, action="store",
+ help="Suite to run in mozharness")
+ parser.add_argument("mozharness_args", nargs=argparse.REMAINDER,
+ help="Extra arguments to pass to mozharness")
+ return parser
+
+class MozharnessRunner(MozbuildObject):
+ def __init__(self, *args, **kwargs):
+ MozbuildObject.__init__(self, *args, **kwargs)
+
+
+ self.test_packages_url = self._test_packages_url()
+ self.installer_url = self._installer_url()
+
+ desktop_unittest_config = [
+ "--config-file", lambda: self.config_path("unittests",
+ "%s_unittest.py" % mozinfo.info['os']),
+ "--config-file", lambda: self.config_path("developer_config.py")]
+
+ self.config = {
+ "__defaults__": {
+ "config": ["--no-read-buildbot-config",
+ "--download-symbols", "ondemand",
+ "--installer-url", self.installer_url,
+ "--test-packages-url", self.test_packages_url]
+ },
+
+ "mochitest-valgrind": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--mochitest-suite", "valgrind-plain"]
+ },
+ "mochitest": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--mochitest-suite", "plain"]
+ },
+ "mochitest-chrome": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--mochitest-suite", "chrome"]
+ },
+ "mochitest-browser-chrome": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--mochitest-suite", "browser-chrome"]
+ },
+ "mochitest-devtools-chrome": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--mochitest-suite", "mochitest-devtools-chrome"]
+ },
+ "crashtest": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--reftest-suite", "crashtest"]
+ },
+ "jsreftest": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--reftest-suite", "jsreftest"]
+ },
+ "reftest": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--reftest-suite", "reftest"]
+ },
+ "reftest-no-accel": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--reftest-suite", "reftest-no-accel"]
+ },
+ "cppunittest": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--cppunittest-suite", "cppunittest"]
+ },
+ "xpcshell": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--xpcshell-suite", "xpcshell"]
+ },
+ "xpcshell-addons": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--xpcshell-suite", "xpcshell-addons"]
+ },
+ "jittest": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--jittest-suite", "jittest"]
+ },
+ "mozbase": {
+ "script": "desktop_unittest.py",
+ "config": desktop_unittest_config + [
+ "--mozbase-suite", "mozbase"]
+ },
+ "marionette": {
+ "script": "marionette.py",
+ "config": ["--config-file", self.config_path("marionette",
+ "test_config.py")]
+ },
+ "web-platform-tests": {
+ "script": "web_platform_tests.py",
+ "config": ["--config-file", self.config_path("web_platform_tests",
+ self.wpt_config)]
+ },
+ }
+
+
+ def path_to_url(self, path):
+ return urlparse.urljoin('file:', urllib.pathname2url(path))
+
+ def _installer_url(self):
+ package_re = {
+            "linux": re.compile(r"^firefox-\d+\..+\.tar\.bz2$"),
+            "win": re.compile(r"^firefox-\d+\..+\.installer\.exe$"),
+            "mac": re.compile(r"^firefox-\d+\..+\.mac(?:64)?\.dmg$"),
+ }[mozinfo.info['os']]
+ dist_path = os.path.join(self.topobjdir, "dist")
+ filenames = [item for item in os.listdir(dist_path) if
+ package_re.match(item)]
+ assert len(filenames) == 1
+ return self.path_to_url(os.path.join(dist_path, filenames[0]))
+
+ def _test_packages_url(self):
+ dist_path = os.path.join(self.topobjdir, "dist")
+ filenames = [item for item in os.listdir(dist_path) if
+ item.endswith('test_packages.json')]
+ assert len(filenames) == 1
+ return self.path_to_url(os.path.join(dist_path, filenames[0]))
+
+ def config_path(self, *parts):
+ return self.path_to_url(os.path.join(self.topsrcdir, "testing", "mozharness",
+ "configs", *parts))
+
+ @property
+ def wpt_config(self):
+ return "test_config.py" if mozinfo.info['os'] != "win" else "test_config_windows.py"
+
+ def run_suite(self, suite, **kwargs):
+ default_config = self.config.get("__defaults__")
+ suite_config = self.config.get(suite)
+
+ if suite_config is None:
+ print("Unknown suite %s" % suite)
+ return 1
+
+ script = os.path.join(self.topsrcdir, "testing", "mozharness",
+ "scripts", suite_config["script"])
+ options = [item() if callable(item) else item
+ for item in default_config["config"] + suite_config["config"]]
+
+ cmd = [script] + options
+
+ rv = subprocess.call(cmd, cwd=os.path.dirname(script))
+ return rv
+
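+# Illustrative sketch (hypothetical invocation): running
+#
+#   ./mach mozharness mochitest
+#
+# resolves the "mochitest" entry in self.config above and invokes
+# desktop_unittest.py with the merged "__defaults__" and suite options.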
+
+@CommandProvider
+class MozharnessCommands(MachCommandBase):
+ @Command('mozharness', category='testing',
+ description='Run tests using mozharness.',
+ conditions=[conditions.is_firefox],
+ parser=get_parser)
+ def mozharness(self, **kwargs):
+ runner = self._spawn(MozharnessRunner)
+ return runner.run_suite(kwargs.pop("suite_name")[0], **kwargs)
diff --git a/testing/mozharness/mozfile/__init__.py b/testing/mozharness/mozfile/__init__.py
new file mode 100644
index 000000000..37b8babb8
--- /dev/null
+++ b/testing/mozharness/mozfile/__init__.py
@@ -0,0 +1,5 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozfile import *
diff --git a/testing/mozharness/mozfile/mozfile.py b/testing/mozharness/mozfile/mozfile.py
new file mode 100644
index 000000000..ac0edcab4
--- /dev/null
+++ b/testing/mozharness/mozfile/mozfile.py
@@ -0,0 +1,372 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from contextlib import contextmanager
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import urlparse
+import urllib2
+import zipfile
+import time
+
+__all__ = ['extract_tarball',
+ 'extract_zip',
+ 'extract',
+ 'is_url',
+ 'load',
+ 'remove',
+ 'rmtree',
+ 'tree',
+ 'NamedTemporaryFile',
+ 'TemporaryDirectory']
+
+try:
+ WindowsError
+except NameError:
+ WindowsError = None # so we can unconditionally catch it later...
+
+
+### utilities for extracting archives
+
+def extract_tarball(src, dest):
+ """extract a .tar file"""
+
+ bundle = tarfile.open(src)
+ namelist = bundle.getnames()
+
+ for name in namelist:
+ bundle.extract(name, path=dest)
+ bundle.close()
+ return namelist
+
+
+def extract_zip(src, dest):
+ """extract a zip file"""
+
+ if isinstance(src, zipfile.ZipFile):
+ bundle = src
+ else:
+ try:
+ bundle = zipfile.ZipFile(src)
+        except Exception:
+ print "src: %s" % src
+ raise
+
+ namelist = bundle.namelist()
+
+ for name in namelist:
+ filename = os.path.realpath(os.path.join(dest, name))
+ if name.endswith('/'):
+ if not os.path.isdir(filename):
+ os.makedirs(filename)
+ else:
+ path = os.path.dirname(filename)
+ if not os.path.isdir(path):
+ os.makedirs(path)
+ _dest = open(filename, 'wb')
+ _dest.write(bundle.read(name))
+ _dest.close()
+ mode = bundle.getinfo(name).external_attr >> 16 & 0x1FF
+ os.chmod(filename, mode)
+ bundle.close()
+ return namelist
+
+
+def extract(src, dest=None):
+ """
+ Takes in a tar or zip file and extracts it to dest
+
+ If dest is not specified, extracts to os.path.dirname(src)
+
+ Returns the list of top level files that were extracted
+ """
+
+ assert os.path.exists(src), "'%s' does not exist" % src
+
+ if dest is None:
+ dest = os.path.dirname(src)
+ elif not os.path.isdir(dest):
+ os.makedirs(dest)
+ assert not os.path.isfile(dest), "dest cannot be a file"
+
+ if zipfile.is_zipfile(src):
+ namelist = extract_zip(src, dest)
+ elif tarfile.is_tarfile(src):
+ namelist = extract_tarball(src, dest)
+ else:
+ raise Exception("mozfile.extract: no archive format found for '%s'" %
+ src)
+
+ # namelist returns paths with forward slashes even in windows
+ top_level_files = [os.path.join(dest, name.rstrip('/')) for name in namelist
+ if len(name.rstrip('/').split('/')) == 1]
+
+ # namelist doesn't include folders, append these to the list
+ for name in namelist:
+ index = name.find('/')
+ if index != -1:
+ root = os.path.join(dest, name[:index])
+ if root not in top_level_files:
+ top_level_files.append(root)
+
+ return top_level_files
+
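+# Illustrative usage sketch (hypothetical archive path): extract an archive
+# next to itself and walk the returned top-level entries.
+def _example_extract_usage():
+    for entry in extract('/tmp/tests.zip'):
+        print entry
+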
+
+### utilities for removal of files and directories
+
+def rmtree(dir):
+ """Deprecated wrapper method to remove a directory tree.
+
+ Ensure to update your code to use mozfile.remove() directly
+
+ :param dir: directory to be removed
+ """
+
+ return remove(dir)
+
+
+def remove(path):
+ """Removes the specified file, link, or directory tree
+
+ This is a replacement for shutil.rmtree that works better under
+ windows.
+
+ :param path: path to be removed
+ """
+
+ def _call_with_windows_retry(func, path, retry_max=5, retry_delay=0.5):
+ """
+ It's possible to see spurious errors on Windows due to various things
+ keeping a handle to the directory open (explorer, virus scanners, etc)
+ So we try a few times if it fails with a known error.
+ """
+ retry_count = 0
+ while True:
+ try:
+ func(path)
+ break
+ except WindowsError as e:
+ # Error 5 == Access is denied
+ # Error 32 == The process cannot access the file because it is
+ # being used by another process
+ # Error 145 == The directory is not empty
+
+ if retry_count == retry_max or e.winerror not in [5, 32, 145]:
+ raise
+ retry_count += 1
+
+ print 'Retrying to remove "%s" because it is in use.' % path
+ time.sleep(retry_delay)
+
+ if not os.path.exists(path):
+ return
+
+ path_stats = os.stat(path)
+
+ if os.path.isfile(path) or os.path.islink(path):
+ # Verify the file or link is read/write for the current user
+ os.chmod(path, path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR)
+ _call_with_windows_retry(os.remove, path)
+
+ elif os.path.isdir(path):
+ # Verify the directory is read/write/execute for the current user
+ os.chmod(path, path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
+ _call_with_windows_retry(shutil.rmtree, path)
+
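+# Illustrative usage sketch (hypothetical path): remove() copes with files,
+# links and directory trees, retrying on transient Windows errors.
+def _example_remove_usage():
+    remove('/tmp/stale-build-dir')
+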
+def depth(directory):
+ """returns the integer depth of a directory or path relative to '/' """
+
+ directory = os.path.abspath(directory)
+ level = 0
+ while True:
+ directory, remainder = os.path.split(directory)
+ level += 1
+ if not remainder:
+ break
+ return level
+
+# ASCII delimiters
+ascii_delimeters = {
+    'vertical_line' : '|',
+    'item_marker'   : '+',
+    'last_child'    : '\\'
+    }
+
+# unicode delimiters
+unicode_delimeters = {
+    'vertical_line' : '│',
+    'item_marker'   : '├',
+    'last_child'    : '└'
+    }
+
+def tree(directory,
+ item_marker=unicode_delimeters['item_marker'],
+ vertical_line=unicode_delimeters['vertical_line'],
+ last_child=unicode_delimeters['last_child'],
+ sort_key=lambda x: x.lower()):
+ """
+ display tree directory structure for `directory`
+ """
+
+ retval = []
+ indent = []
+ last = {}
+ top = depth(directory)
+
+ for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
+
+ abspath = os.path.abspath(dirpath)
+ basename = os.path.basename(abspath)
+ parent = os.path.dirname(abspath)
+ level = depth(abspath) - top
+
+ # sort articles of interest
+ for resource in (dirnames, filenames):
+ resource[:] = sorted(resource, key=sort_key)
+
+ if level > len(indent):
+ indent.append(vertical_line)
+ indent = indent[:level]
+
+ if dirnames:
+ files_end = item_marker
+ last[abspath] = dirnames[-1]
+ else:
+ files_end = last_child
+
+ if last.get(parent) == os.path.basename(abspath):
+ # last directory of parent
+ dirpath_mark = last_child
+ indent[-1] = ' '
+ elif not indent:
+ dirpath_mark = ''
+ else:
+ dirpath_mark = item_marker
+
+ # append the directory and piece of tree structure
+ # if the top-level entry directory, print as passed
+ retval.append('%s%s%s'% (''.join(indent[:-1]),
+ dirpath_mark,
+ basename if retval else directory))
+ # add the files
+ if filenames:
+ last_file = filenames[-1]
+ retval.extend([('%s%s%s' % (''.join(indent),
+ files_end if filename == last_file else item_marker,
+ filename))
+ for index, filename in enumerate(filenames)])
+
+ return '\n'.join(retval)
+
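+# Illustrative usage sketch (hypothetical directory): render a listing with
+# the plain-ASCII delimiters defined above.
+def _example_tree_usage():
+    print tree('/tmp/some-dir', **ascii_delimeters)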
+
+### utilities for temporary resources
+
+class NamedTemporaryFile(object):
+ """
+ Like tempfile.NamedTemporaryFile except it works on Windows
+ in the case where you open the created file a second time.
+
+ This behaves very similarly to tempfile.NamedTemporaryFile but may
+ not behave exactly the same. For example, this function does not
+ prevent fd inheritance by children.
+
+ Example usage:
+
+ with NamedTemporaryFile() as fh:
+ fh.write(b'foobar')
+
+ print('Filename: %s' % fh.name)
+
+ see https://bugzilla.mozilla.org/show_bug.cgi?id=821362
+ """
+ def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='tmp',
+ dir=None, delete=True):
+
+ fd, path = tempfile.mkstemp(suffix, prefix, dir, 't' in mode)
+ os.close(fd)
+
+ self.file = open(path, mode)
+ self._path = path
+ self._delete = delete
+ self._unlinked = False
+
+ def __getattr__(self, k):
+ return getattr(self.__dict__['file'], k)
+
+ def __iter__(self):
+ return self.__dict__['file']
+
+ def __enter__(self):
+ self.file.__enter__()
+ return self
+
+ def __exit__(self, exc, value, tb):
+ self.file.__exit__(exc, value, tb)
+ if self.__dict__['_delete']:
+ os.unlink(self.__dict__['_path'])
+ self._unlinked = True
+
+ def __del__(self):
+ if self.__dict__['_unlinked']:
+ return
+ self.file.__exit__(None, None, None)
+ if self.__dict__['_delete']:
+ os.unlink(self.__dict__['_path'])
+
+
+@contextmanager
+def TemporaryDirectory():
+ """
+ create a temporary directory using tempfile.mkdtemp, and then clean it up.
+
+ Example usage:
+ with TemporaryDirectory() as tmp:
+ open(os.path.join(tmp, "a_temp_file"), "w").write("data")
+
+ """
+ tempdir = tempfile.mkdtemp()
+ try:
+ yield tempdir
+ finally:
+ shutil.rmtree(tempdir)
+
+
+### utilities dealing with URLs
+
+def is_url(thing):
+ """
+ Return True if thing looks like a URL.
+ """
+
+ parsed = urlparse.urlparse(thing)
+ if 'scheme' in parsed:
+ return len(parsed.scheme) >= 2
+ else:
+ return len(parsed[0]) >= 2
+
+def load(resource):
+ """
+ open a file or URL for reading. If the passed resource string is not a URL,
+ or begins with 'file://', return a ``file``. Otherwise, return the
+ result of urllib2.urlopen()
+ """
+
+ # handle file URLs separately due to python stdlib limitations
+ if resource.startswith('file://'):
+ resource = resource[len('file://'):]
+
+ if not is_url(resource):
+ # if no scheme is given, it is a file path
+ return file(resource)
+
+ return urllib2.urlopen(resource)
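+
+# Illustrative usage sketch (hypothetical URL): load() returns a file object
+# for local paths and a urllib2 response object for remote URLs.
+def _example_load_usage():
+    print load('http://example.com/config.json').read()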
+
diff --git a/testing/mozharness/mozharness/__init__.py b/testing/mozharness/mozharness/__init__.py
new file mode 100644
index 000000000..609f98f33
--- /dev/null
+++ b/testing/mozharness/mozharness/__init__.py
@@ -0,0 +1,2 @@
+version = (0, 7)
+version_string = '.'.join(['%d' % i for i in version])
diff --git a/testing/mozharness/mozharness/base/__init__.py b/testing/mozharness/mozharness/base/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/base/__init__.py
diff --git a/testing/mozharness/mozharness/base/config.py b/testing/mozharness/mozharness/base/config.py
new file mode 100644
index 000000000..9c17b3381
--- /dev/null
+++ b/testing/mozharness/mozharness/base/config.py
@@ -0,0 +1,569 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic config parsing and dumping, the way I remember it from scripts
+gone by.
+
+The config should be built from script-level defaults, overlaid by
+config-file defaults, overlaid by command line options.
+
+ (For buildbot-analogues that would be factory-level defaults,
+ builder-level defaults, and build request/scheduler settings.)
+
+The config should then be locked (set to read-only, to prevent runtime
+alterations). Afterwards we should dump the config to a file that is
+uploaded with the build, and can be used to debug or replicate the build
+at a later time.
+
+TODO:
+
+* check_required_settings or something -- run at init, assert that
+ these settings are set.
+"""
+
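+# Illustrative sketch of the layering described above (hypothetical values):
+#
+#   config = {'opt': 'script-default'}            # script-level default
+#   config.update(parse_config_file('foo.py'))    # config-file overlay
+#   config['opt'] = 'from-command-line'           # command line overlay
+#   config = ReadOnlyDict(config); config.lock()  # then lock it
+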
+from copy import deepcopy
+from optparse import OptionParser, Option, OptionGroup
+import os
+import sys
+import urllib2
+import socket
+import time
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+from mozharness.base.log import DEBUG, INFO, WARNING, ERROR, CRITICAL, FATAL
+
+
+# optparse {{{1
+class ExtendedOptionParser(OptionParser):
+ """OptionParser, but with ExtendOption as the option_class.
+ """
+ def __init__(self, **kwargs):
+ kwargs['option_class'] = ExtendOption
+ OptionParser.__init__(self, **kwargs)
+
+
+class ExtendOption(Option):
+ """from http://docs.python.org/library/optparse.html?highlight=optparse#adding-new-actions"""
+ ACTIONS = Option.ACTIONS + ("extend",)
+ STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
+ TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
+ ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
+
+ def take_action(self, action, dest, opt, value, values, parser):
+ if action == "extend":
+ lvalue = value.split(",")
+ values.ensure_value(dest, []).extend(lvalue)
+ else:
+ Option.take_action(
+ self, action, dest, opt, value, values, parser)
+
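+# Illustrative usage sketch: with action="extend", repeated and
+# comma-separated values accumulate into a single list.
+def _example_extend_option():
+    parser = ExtendedOptionParser()
+    parser.add_option("--item", action="extend", dest="items", type="string")
+    options, _ = parser.parse_args(["--item", "a,b", "--item", "c"])
+    assert options.items == ["a", "b", "c"]
+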
+
+def make_immutable(item):
+ if isinstance(item, list) or isinstance(item, tuple):
+ result = LockedTuple(item)
+ elif isinstance(item, dict):
+ result = ReadOnlyDict(item)
+ result.lock()
+ else:
+ result = item
+ return result
+
+
+class LockedTuple(tuple):
+ def __new__(cls, items):
+ return tuple.__new__(cls, (make_immutable(x) for x in items))
+ def __deepcopy__(self, memo):
+ return [deepcopy(elem, memo) for elem in self]
+
+
+# ReadOnlyDict {{{1
+class ReadOnlyDict(dict):
+ def __init__(self, dictionary):
+ self._lock = False
+ self.update(dictionary.copy())
+
+ def _check_lock(self):
+ assert not self._lock, "ReadOnlyDict is locked!"
+
+ def lock(self):
+ for (k, v) in self.items():
+ self[k] = make_immutable(v)
+ self._lock = True
+
+ def __setitem__(self, *args):
+ self._check_lock()
+ return dict.__setitem__(self, *args)
+
+ def __delitem__(self, *args):
+ self._check_lock()
+ return dict.__delitem__(self, *args)
+
+ def clear(self, *args):
+ self._check_lock()
+ return dict.clear(self, *args)
+
+ def pop(self, *args):
+ self._check_lock()
+ return dict.pop(self, *args)
+
+ def popitem(self, *args):
+ self._check_lock()
+ return dict.popitem(self, *args)
+
+ def setdefault(self, *args):
+ self._check_lock()
+ return dict.setdefault(self, *args)
+
+ def update(self, *args):
+ self._check_lock()
+ dict.update(self, *args)
+
+ def __deepcopy__(self, memo):
+ cls = self.__class__
+ result = cls.__new__(cls)
+ memo[id(self)] = result
+ for k, v in self.__dict__.items():
+ setattr(result, k, deepcopy(v, memo))
+ result._lock = False
+ for k, v in self.items():
+ result[k] = deepcopy(v, memo)
+ return result
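+
+# Illustrative usage sketch: a ReadOnlyDict is mutable until lock() is
+# called; locking also converts nested lists to LockedTuple.
+def _example_read_only_dict():
+    cfg = ReadOnlyDict({'opts': ['a', 'b']})
+    cfg['extra'] = 1  # still allowed
+    cfg.lock()
+    assert isinstance(cfg['opts'], LockedTuple)
+    # cfg['extra'] = 2 would now raise "ReadOnlyDict is locked!"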
+
+# parse_config_file {{{1
+def parse_config_file(file_name, quiet=False, search_path=None,
+ config_dict_name="config"):
+ """Read a config file and return a dictionary.
+ """
+ file_path = None
+ if os.path.exists(file_name):
+ file_path = file_name
+ else:
+ if not search_path:
+ search_path = ['.', os.path.join(sys.path[0], '..', 'configs'),
+ os.path.join(sys.path[0], '..', '..', 'configs')]
+ for path in search_path:
+ if os.path.exists(os.path.join(path, file_name)):
+ file_path = os.path.join(path, file_name)
+ break
+ else:
+ raise IOError("Can't find %s in %s!" % (file_name, search_path))
+ if file_name.endswith('.py'):
+ global_dict = {}
+ local_dict = {}
+ execfile(file_path, global_dict, local_dict)
+ config = local_dict[config_dict_name]
+ elif file_name.endswith('.json'):
+ fh = open(file_path)
+ config = {}
+ json_config = json.load(fh)
+ config = dict(json_config)
+ fh.close()
+ else:
+ raise RuntimeError("Unknown config file type %s!" % file_name)
+ # TODO return file_path
+ return config
+
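+# Illustrative usage sketch (hypothetical file name): a .py config is any
+# file defining a dict named "config"; .json configs are loaded directly.
+def _example_parse_config():
+    cfg = parse_config_file('my_config.py')
+    print cfg.get('log_level')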
+
+def download_config_file(url, file_name):
+ n = 0
+ attempts = 5
+ sleeptime = 60
+ max_sleeptime = 5 * 60
+ while True:
+ if n >= attempts:
+            print "Failed to download from url %s after %d attempts, quitting..." % (url, attempts)
+ raise SystemError(-1)
+ try:
+ contents = urllib2.urlopen(url, timeout=30).read()
+ break
+ except urllib2.URLError, e:
+ print "Error downloading from url %s: %s" % (url, str(e))
+ except socket.timeout, e:
+ print "Time out accessing %s: %s" % (url, str(e))
+ except socket.error, e:
+ print "Socket error when accessing %s: %s" % (url, str(e))
+ print "Sleeping %d seconds before retrying" % sleeptime
+ time.sleep(sleeptime)
+ sleeptime = sleeptime * 2
+ if sleeptime > max_sleeptime:
+ sleeptime = max_sleeptime
+ n += 1
+
+ try:
+ f = open(file_name, 'w')
+ f.write(contents)
+ f.close()
+ except IOError, e:
+ print "Error writing downloaded contents to file %s: %s" % (file_name, str(e))
+ raise SystemError(-1)
+
+
+# BaseConfig {{{1
+class BaseConfig(object):
+ """Basic config setting/getting.
+ """
+ def __init__(self, config=None, initial_config_file=None, config_options=None,
+ all_actions=None, default_actions=None,
+ volatile_config=None, option_args=None,
+ require_config_file=False,
+ append_env_variables_from_configs=False,
+ usage="usage: %prog [options]"):
+ self._config = {}
+ self.all_cfg_files_and_dicts = []
+ self.actions = []
+ self.config_lock = False
+ self.require_config_file = require_config_file
+        # Allows appending env variables from multiple config files
+ self.append_env_variables_from_configs = append_env_variables_from_configs
+
+ if all_actions:
+ self.all_actions = all_actions[:]
+ else:
+ self.all_actions = ['clobber', 'build']
+ if default_actions:
+ self.default_actions = default_actions[:]
+ else:
+ self.default_actions = self.all_actions[:]
+ if volatile_config is None:
+ self.volatile_config = {
+ 'actions': None,
+ 'add_actions': None,
+ 'no_actions': None,
+ }
+ else:
+ self.volatile_config = deepcopy(volatile_config)
+
+ if config:
+ self.set_config(config)
+ if initial_config_file:
+ initial_config = parse_config_file(initial_config_file)
+ self.all_cfg_files_and_dicts.append(
+ (initial_config_file, initial_config)
+ )
+ self.set_config(initial_config)
+ # Since initial_config_file is only set when running unit tests,
+ # if no option_args have been specified, then the parser will
+ # parse sys.argv which in this case would be the command line
+ # options specified to run the tests, e.g. nosetests -v. Clearly,
+ # the options passed to nosetests (such as -v) should not be
+ # interpreted by mozharness as mozharness options, so we specify
+ # a dummy command line with no options, so that the parser does
+ # not add anything from the test invocation command line
+ # arguments to the mozharness options.
+ if option_args is None:
+ option_args=['dummy_mozharness_script_with_no_command_line_options.py']
+ if config_options is None:
+ config_options = []
+ self._create_config_parser(config_options, usage)
+ # we allow manually passing of option args for things like nosetests
+ self.parse_args(args=option_args)
+
+ def get_read_only_config(self):
+ return ReadOnlyDict(self._config)
+
+ def _create_config_parser(self, config_options, usage):
+ self.config_parser = ExtendedOptionParser(usage=usage)
+ self.config_parser.add_option(
+ "--work-dir", action="store", dest="work_dir",
+ type="string", default="build",
+ help="Specify the work_dir (subdir of base_work_dir)"
+ )
+ self.config_parser.add_option(
+ "--base-work-dir", action="store", dest="base_work_dir",
+ type="string", default=os.getcwd(),
+ help="Specify the absolute path of the parent of the working directory"
+ )
+ self.config_parser.add_option(
+ "-c", "--config-file", "--cfg", action="extend", dest="config_files",
+ type="string", help="Specify a config file; can be repeated"
+ )
+ self.config_parser.add_option(
+ "-C", "--opt-config-file", "--opt-cfg", action="extend",
+ dest="opt_config_files", type="string", default=[],
+ help="Specify an optional config file, like --config-file but with no "
+ "error if the file is missing; can be repeated"
+ )
+ self.config_parser.add_option(
+ "--dump-config", action="store_true",
+ dest="dump_config",
+ help="List and dump the config generated from this run to "
+ "a JSON file."
+ )
+ self.config_parser.add_option(
+ "--dump-config-hierarchy", action="store_true",
+ dest="dump_config_hierarchy",
+ help="Like --dump-config but will list and dump which config "
+ "files were used making up the config and specify their own "
+ "keys/values that were not overwritten by another cfg -- "
+ "held the highest hierarchy."
+ )
+
+ # Logging
+ log_option_group = OptionGroup(self.config_parser, "Logging")
+ log_option_group.add_option(
+ "--log-level", action="store",
+ type="choice", dest="log_level", default=INFO,
+ choices=[DEBUG, INFO, WARNING, ERROR, CRITICAL, FATAL],
+ help="Set log level (debug|info|warning|error|critical|fatal)"
+ )
+ log_option_group.add_option(
+ "-q", "--quiet", action="store_false", dest="log_to_console",
+ default=True, help="Don't log to the console"
+ )
+ log_option_group.add_option(
+ "--append-to-log", action="store_true",
+ dest="append_to_log", default=False,
+ help="Append to the log"
+ )
+ log_option_group.add_option(
+ "--multi-log", action="store_const", const="multi",
+ dest="log_type", help="Log using MultiFileLogger"
+ )
+ log_option_group.add_option(
+ "--simple-log", action="store_const", const="simple",
+ dest="log_type", help="Log using SimpleFileLogger"
+ )
+ self.config_parser.add_option_group(log_option_group)
+
+ # Actions
+ action_option_group = OptionGroup(
+ self.config_parser, "Actions",
+ "Use these options to list or enable/disable actions."
+ )
+ action_option_group.add_option(
+ "--list-actions", action="store_true",
+ dest="list_actions",
+ help="List all available actions, then exit"
+ )
+ action_option_group.add_option(
+ "--add-action", action="extend",
+ dest="add_actions", metavar="ACTIONS",
+ help="Add action %s to the list of actions" % self.all_actions
+ )
+ action_option_group.add_option(
+ "--no-action", action="extend",
+ dest="no_actions", metavar="ACTIONS",
+ help="Don't perform action"
+ )
+ for action in self.all_actions:
+ action_option_group.add_option(
+ "--%s" % action, action="append_const",
+ dest="actions", const=action,
+ help="Add %s to the limited list of actions" % action
+ )
+ action_option_group.add_option(
+ "--no-%s" % action, action="append_const",
+ dest="no_actions", const=action,
+ help="Remove %s from the list of actions to perform" % action
+ )
+ self.config_parser.add_option_group(action_option_group)
+ # Child-specified options
+ # TODO error checking for overlapping options
+ if config_options:
+ for option in config_options:
+ self.config_parser.add_option(*option[0], **option[1])
+
+ # Initial-config-specified options
+ config_options = self._config.get('config_options', None)
+ if config_options:
+ for option in config_options:
+ self.config_parser.add_option(*option[0], **option[1])
+
+ def set_config(self, config, overwrite=False):
+ """This is probably doable some other way."""
+ if self._config and not overwrite:
+ self._config.update(config)
+ else:
+ self._config = config
+ return self._config
+
+ def get_actions(self):
+ return self.actions
+
+ def verify_actions(self, action_list, quiet=False):
+ for action in action_list:
+ if action not in self.all_actions:
+ if not quiet:
+ print("Invalid action %s not in %s!" % (action,
+ self.all_actions))
+ raise SystemExit(-1)
+ return action_list
+
+ def verify_actions_order(self, action_list):
+ try:
+ indexes = [ self.all_actions.index(elt) for elt in action_list ]
+ sorted_indexes = sorted(indexes)
+ for i in range(len(indexes)):
+ if indexes[i] != sorted_indexes[i]:
+ print(("Action %s comes in different order in %s\n" +
+ "than in %s") % (action_list[i], action_list, self.all_actions))
+ raise SystemExit(-1)
+ except ValueError as e:
+ print("Invalid action found: " + str(e))
+ raise SystemExit(-1)
+
+ def list_actions(self):
+ print "Actions available:"
+ for a in self.all_actions:
+ print " " + ("*" if a in self.default_actions else " "), a
+ raise SystemExit(0)
+
+ def get_cfgs_from_files(self, all_config_files, options):
+ """Returns the configuration derived from the list of configuration
+ files. The result is represented as a list of `(filename,
+ config_dict)` tuples; they will be combined with keys in later
+ dictionaries taking precedence over earlier.
+
+ `all_config_files` is all files specified with `--config-file` and
+        `--opt-config-file`; `options` is the optparse options object giving
+ access to any other command-line options.
+
+ This function is also responsible for downloading any configuration
+ files specified by URL. It uses ``parse_config_file`` in this module
+ to parse individual files.
+
+ This method can be overridden in a subclass to add extra logic to the
+ way that self.config is made up. See
+ `mozharness.mozilla.building.buildbase.BuildingConfig` for an example.
+ """
+ all_cfg_files_and_dicts = []
+ for cf in all_config_files:
+ try:
+                if '://' in cf:  # config file is a URL
+ file_name = os.path.basename(cf)
+ file_path = os.path.join(os.getcwd(), file_name)
+ download_config_file(cf, file_path)
+ all_cfg_files_and_dicts.append(
+ (file_path, parse_config_file(file_path))
+ )
+ else:
+ all_cfg_files_and_dicts.append((cf, parse_config_file(cf)))
+ except Exception:
+ if cf in options.opt_config_files:
+ print(
+ "WARNING: optional config file not found %s" % cf
+ )
+ else:
+ raise
+ return all_cfg_files_and_dicts
+
+ def parse_args(self, args=None):
+ """Parse command line arguments in a generic way.
+ Return the parser object after adding the basic options, so
+ child objects can manipulate it.
+ """
+ self.command_line = ' '.join(sys.argv)
+ if args is None:
+ args = sys.argv[1:]
+ (options, args) = self.config_parser.parse_args(args)
+
+ defaults = self.config_parser.defaults.copy()
+
+ if not options.config_files:
+ if self.require_config_file:
+ if options.list_actions:
+ self.list_actions()
+ print("Required config file not set! (use --config-file option)")
+ raise SystemExit(-1)
+ else:
+ # this is what get_cfgs_from_files returns. It will represent each
+            # config file name and its associated dict
+ # eg ('builds/branch_specifics.py', {'foo': 'bar'})
+ # let's store this to self for things like --interpret-config-files
+ self.all_cfg_files_and_dicts.extend(self.get_cfgs_from_files(
+ # append opt_config to allow them to overwrite previous configs
+ options.config_files + options.opt_config_files, options=options
+ ))
+ config = {}
+ if self.append_env_variables_from_configs:
+ # We only append values from various configs for the 'env' entry
+ # For everything else we follow the standard behaviour
+ for i, (c_file, c_dict) in enumerate(self.all_cfg_files_and_dicts):
+ for v in c_dict.keys():
+ if v == 'env' and v in config:
+ config[v].update(c_dict[v])
+ else:
+ config[v] = c_dict[v]
+ else:
+ for i, (c_file, c_dict) in enumerate(self.all_cfg_files_and_dicts):
+ config.update(c_dict)
+ # assign or update self._config depending on if it exists or not
+ # NOTE self._config will be passed to ReadOnlyConfig's init -- a
+ # dict subclass with immutable locking capabilities -- and serve
+ # as the keys/values that make up that instance. Ultimately,
+ # this becomes self.config during BaseScript's init
+ self.set_config(config)
+ for key in defaults.keys():
+ value = getattr(options, key)
+ if value is None:
+ continue
+ # Don't override config_file defaults with config_parser defaults
+ if key in defaults and value == defaults[key] and key in self._config:
+ continue
+ self._config[key] = value
+
+ # The idea behind the volatile_config is we don't want to save this
+ # info over multiple runs. This defaults to the action-specific
+ # config options, but can be anything.
+ for key in self.volatile_config.keys():
+ if self._config.get(key) is not None:
+ self.volatile_config[key] = self._config[key]
+ del(self._config[key])
+
+ self.update_actions()
+ if options.list_actions:
+ self.list_actions()
+
+ # Keep? This is for saving the volatile config in the dump_config
+ self._config['volatile_config'] = self.volatile_config
+
+ self.options = options
+ self.args = args
+ return (self.options, self.args)
+
+ def update_actions(self):
+ """ Update actions after reading in config.
+
+ Seems a little complex, but the logic goes:
+
+ First, if default_actions is specified in the config, set our
+ default actions even if the script specifies other default actions.
+
+ Without any other action-specific options, run with default actions.
+
+        If we specify --ACTION once or multiple times, we want to override
+        the default_actions list with the one(s) we list.
+
+ Otherwise, if we specify --add-action ACTION, we want to add an
+ action to the list.
+
+ Finally, if we specify --no-ACTION, remove that from the list of
+ actions to perform.
+ """
+ if self._config.get('default_actions'):
+ default_actions = self.verify_actions(self._config['default_actions'])
+ self.default_actions = default_actions
+ self.verify_actions_order(self.default_actions)
+ self.actions = self.default_actions[:]
+ if self.volatile_config['actions']:
+ actions = self.verify_actions(self.volatile_config['actions'])
+ self.actions = actions
+ elif self.volatile_config['add_actions']:
+ actions = self.verify_actions(self.volatile_config['add_actions'])
+ self.actions.extend(actions)
+ if self.volatile_config['no_actions']:
+ actions = self.verify_actions(self.volatile_config['no_actions'])
+ for action in actions:
+ if action in self.actions:
+ self.actions.remove(action)
+
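+# Illustrative sketch of the resolution order described in the docstring
+# (hypothetical command lines, default actions ['clobber', 'build']):
+#
+#   --build             =>  actions == ['build']
+#   --add-action build  =>  actions == default_actions + ['build']
+#   --no-clobber        =>  'clobber' dropped from the resulting list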
+
+# __main__ {{{1
+if __name__ == '__main__':
+ pass
diff --git a/testing/mozharness/mozharness/base/diskutils.py b/testing/mozharness/mozharness/base/diskutils.py
new file mode 100644
index 000000000..745384ff9
--- /dev/null
+++ b/testing/mozharness/mozharness/base/diskutils.py
@@ -0,0 +1,156 @@
+"""Disk utility module, no mixins here!
+
+ examples:
+ 1) get disk size
+ from mozharness.base.diskutils import DiskInfo, DiskutilsError
+ ...
+ try:
+        di = DiskSize().get_size(path='/', unit='MB')
+ except DiskutilsError as e:
+ # manage the exception e.g: log.error(e)
+ pass
+ log.info("%s" % di)
+
+
+ 2) convert disk size:
+ from mozharness.base.diskutils import DiskutilsError, convert_to
+ ...
+ file_size = <function that gets file size in bytes>
+ # convert file_size to GB
+ try:
+ file_size = convert_to(file_size, from_unit='bytes', to_unit='GB')
+ except DiskutilsError as e:
+ # manage the exception e.g: log.error(e)
+ pass
+
+"""
+import ctypes
+import os
+import sys
+import logging
+from mozharness.base.log import INFO, numeric_log_level
+
+# use mozharness log
+log = logging.getLogger(__name__)
+
+
+class DiskutilsError(Exception):
+ """Exception thrown by Diskutils module"""
+ pass
+
+
+def convert_to(size, from_unit, to_unit):
+    """Helper method to convert filesystem sizes between bytes/kB/MB/GB/TB.
+    Valid values for from_unit and to_unit are:
+        * bytes
+        * kB
+        * MB
+        * GB
+        * TB
+    returns: size converted from from_unit to to_unit.
+ """
+ sizes = {'bytes': 1,
+ 'kB': 1024,
+ 'MB': 1024 * 1024,
+ 'GB': 1024 * 1024 * 1024,
+ 'TB': 1024 * 1024 * 1024 * 1024}
+ try:
+ df = sizes[to_unit]
+ sf = sizes[from_unit]
+ return size * sf / df
+ except KeyError:
+ raise DiskutilsError('conversion error: Invalid source or destination format')
+ except TypeError:
+ raise DiskutilsError('conversion error: size (%s) is not a number' % size)
+
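+# Illustrative sketch: units are 1024-based, and with integer operands the
+# Python 2 division below truncates.
+#
+#   convert_to(4 * 1024 * 1024, from_unit='bytes', to_unit='MB')  ==  4
+#   convert_to(1, from_unit='kB', to_unit='bytes')                ==  1024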
+
+class DiskInfo(object):
+ """Stores basic information about the disk"""
+ def __init__(self):
+ self.unit = 'bytes'
+ self.free = 0
+ self.used = 0
+ self.total = 0
+
+ def __str__(self):
+ string = ['Disk space info (in %s)' % self.unit]
+ string += ['total: %s' % self.total]
+ string += ['used: %s' % self.used]
+ string += ['free: %s' % self.free]
+ return " ".join(string)
+
+ def _to(self, unit):
+ from_unit = self.unit
+ to_unit = unit
+ self.free = convert_to(self.free, from_unit=from_unit, to_unit=to_unit)
+ self.used = convert_to(self.used, from_unit=from_unit, to_unit=to_unit)
+ self.total = convert_to(self.total, from_unit=from_unit, to_unit=to_unit)
+ self.unit = unit
+
+
+class DiskSize(object):
+ """DiskSize object
+ """
+ @staticmethod
+ def _posix_size(path):
+ """returns the disk size in bytes
+ disk size is relative to path
+ """
+ # we are on a POSIX system
+ st = os.statvfs(path)
+ disk_info = DiskInfo()
+ disk_info.free = st.f_bavail * st.f_frsize
+ disk_info.used = (st.f_blocks - st.f_bfree) * st.f_frsize
+ disk_info.total = st.f_blocks * st.f_frsize
+ return disk_info
+
+ @staticmethod
+ def _windows_size(path):
+ """returns size in bytes, works only on windows platforms"""
+ # we're on a non POSIX system (windows)
+ # DLL call
+ disk_info = DiskInfo()
+ dummy = ctypes.c_ulonglong() # needed by the dll call but not used
+ total = ctypes.c_ulonglong() # stores the total space value
+ free = ctypes.c_ulonglong() # stores the free space value
+ # depending on path format (unicode or not) and python version (2 or 3)
+ # we need to call GetDiskFreeSpaceExW or GetDiskFreeSpaceExA
+ called_function = ctypes.windll.kernel32.GetDiskFreeSpaceExA
+ if isinstance(path, unicode) or sys.version_info >= (3,):
+ called_function = ctypes.windll.kernel32.GetDiskFreeSpaceExW
+ # we're ready for the dll call. On error it returns 0
+ if called_function(path,
+ ctypes.byref(dummy),
+ ctypes.byref(total),
+ ctypes.byref(free)) != 0:
+ # success, we can use the values returned by the dll call
+ disk_info.free = free.value
+ disk_info.total = total.value
+ disk_info.used = total.value - free.value
+ return disk_info
+
+ @staticmethod
+ def get_size(path, unit, log_level=INFO):
+ """Disk info stats:
+ total => size of the disk
+ used => space used
+ free => free space
+        In case of error raises a DiskutilsError exception
+ """
+ try:
+ # let's try to get the disk size using os module
+ disk_info = DiskSize()._posix_size(path)
+ except AttributeError:
+ try:
+ # os module failed. let's try to get the size using
+ # ctypes.windll...
+ disk_info = DiskSize()._windows_size(path)
+ except AttributeError:
+                # No luck! This is neither a POSIX nor a Windows platform
+ # raise an exception
+ raise DiskutilsError('Unsupported platform')
+
+ disk_info._to(unit)
+ lvl = numeric_log_level(log_level)
+ log.log(lvl, msg="%s" % disk_info)
+ return disk_info
diff --git a/testing/mozharness/mozharness/base/errors.py b/testing/mozharness/mozharness/base/errors.py
new file mode 100755
index 000000000..9d2f3ebe1
--- /dev/null
+++ b/testing/mozharness/mozharness/base/errors.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic error lists.
+
+Error lists are used to parse output in mozharness.base.log.OutputParser.
+
+Each line of output is matched against each substring or regular expression
+in the error list. On a match, we determine the 'level' of that line,
+whether IGNORE, DEBUG, INFO, WARNING, ERROR, CRITICAL, or FATAL.
+
+TODO: Context lines (requires work on the OutputParser side)
+
+TODO: We could also create classes that generate these, but with the
+appropriate level (please don't die on any errors; please die on any
+warning; etc.) or platform or language or whatever.
+"""
+
+import re
+
+from mozharness.base.log import DEBUG, WARNING, ERROR, CRITICAL, FATAL
+
+
+# Exceptions
+class VCSException(Exception):
+ pass
+
+# ErrorLists {{{1
+BaseErrorList = [{
+ 'substr': r'''command not found''',
+ 'level': ERROR
+}]
+
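+# Illustrative sketch: an entry is a dict with a 'substr' or a compiled
+# 'regex', a 'level', and an optional 'explanation'. A list for a
+# hypothetical tool could look like:
+#
+#   MyToolErrorList = BaseErrorList + [
+#       {'regex': re.compile(r'''^mytool: fatal'''), 'level': FATAL,
+#        'explanation': 'mytool could not start'},
+#   ]
+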
+# For ssh, scp, rsync over ssh
+SSHErrorList = BaseErrorList + [{
+ 'substr': r'''Name or service not known''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Could not resolve hostname''',
+ 'level': ERROR
+}, {
+ 'substr': r'''POSSIBLE BREAK-IN ATTEMPT''',
+ 'level': WARNING
+}, {
+ 'substr': r'''Network error:''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Access denied''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Authentication refused''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Out of memory''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Connection reset by peer''',
+ 'level': WARNING
+}, {
+ 'substr': r'''Host key verification failed''',
+ 'level': ERROR
+}, {
+ 'substr': r'''WARNING:''',
+ 'level': WARNING
+}, {
+ 'substr': r'''rsync error:''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Broken pipe:''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Permission denied:''',
+ 'level': ERROR
+}, {
+ 'substr': r'''connection unexpectedly closed''',
+ 'level': ERROR
+}, {
+ 'substr': r'''Warning: Identity file''',
+ 'level': ERROR
+}, {
+ 'substr': r'''command-line line 0: Missing argument''',
+ 'level': ERROR
+}]
+
+HgErrorList = BaseErrorList + [{
+ 'regex': re.compile(r'''^abort:'''),
+ 'level': ERROR,
+ 'explanation': 'Automation Error: hg not responding'
+}, {
+ 'substr': r'''unknown exception encountered''',
+ 'level': ERROR,
+ 'explanation': 'Automation Error: python exception in hg'
+}, {
+ 'substr': r'''failed to import extension''',
+ 'level': WARNING,
+ 'explanation': 'Automation Error: hg extension missing'
+}]
+
+GitErrorList = BaseErrorList + [
+ {'substr': r'''Permission denied (publickey).''', 'level': ERROR},
+ {'substr': r'''fatal: The remote end hung up unexpectedly''', 'level': ERROR},
+ {'substr': r'''does not appear to be a git repository''', 'level': ERROR},
+ {'substr': r'''error: src refspec''', 'level': ERROR},
+ {'substr': r'''invalid author/committer line -''', 'level': ERROR},
+ {'substr': r'''remote: fatal: Error in object''', 'level': ERROR},
+ {'substr': r'''fatal: sha1 file '<stdout>' write error: Broken pipe''', 'level': ERROR},
+ {'substr': r'''error: failed to push some refs to ''', 'level': ERROR},
+ {'substr': r'''remote: error: denying non-fast-forward ''', 'level': ERROR},
+ {'substr': r'''! [remote rejected] ''', 'level': ERROR},
+ {'regex': re.compile(r'''remote:.*No such file or directory'''), 'level': ERROR},
+]
+
+PythonErrorList = BaseErrorList + [
+ {'regex': re.compile(r'''Warning:.*Error: '''), 'level': WARNING},
+ {'substr': r'''Traceback (most recent call last)''', 'level': ERROR},
+ {'substr': r'''SyntaxError: ''', 'level': ERROR},
+ {'substr': r'''TypeError: ''', 'level': ERROR},
+ {'substr': r'''NameError: ''', 'level': ERROR},
+ {'substr': r'''ZeroDivisionError: ''', 'level': ERROR},
+ {'regex': re.compile(r'''raise \w*Exception: '''), 'level': CRITICAL},
+ {'regex': re.compile(r'''raise \w*Error: '''), 'level': CRITICAL},
+]
+
+VirtualenvErrorList = [
+ {'substr': r'''not found or a compiler error:''', 'level': WARNING},
+    {'regex': re.compile(r'''\d+: error: '''), 'level': ERROR},
+    {'regex': re.compile(r'''\d+: warning: '''), 'level': WARNING},
+ {'regex': re.compile(r'''Downloading .* \(.*\): *([0-9]+%)? *[0-9\.]+[kmKM]b'''), 'level': DEBUG},
+] + PythonErrorList
+
+
+# We may need to have various MakefileErrorLists for differing amounts of
+# warning-ignoring-ness.
+MakefileErrorList = BaseErrorList + PythonErrorList + [
+ {'substr': r'''No rule to make target ''', 'level': ERROR},
+ {'regex': re.compile(r'''akefile.*was not found\.'''), 'level': ERROR},
+ {'regex': re.compile(r'''Stop\.$'''), 'level': ERROR},
+ {'regex': re.compile(r''':\d+: error:'''), 'level': ERROR},
+ {'regex': re.compile(r'''make\[\d+\]: \*\*\* \[.*\] Error \d+'''), 'level': ERROR},
+ {'regex': re.compile(r''':\d+: warning:'''), 'level': WARNING},
+ {'regex': re.compile(r'''make(?:\[\d+\])?: \*\*\*/'''), 'level': ERROR},
+ {'substr': r'''Warning: ''', 'level': WARNING},
+]
+
+TarErrorList = BaseErrorList + [
+ {'substr': r'''(stdin) is not a bzip2 file.''', 'level': ERROR},
+ {'regex': re.compile(r'''Child returned status [1-9]'''), 'level': ERROR},
+ {'substr': r'''Error exit delayed from previous errors''', 'level': ERROR},
+ {'substr': r'''stdin: unexpected end of file''', 'level': ERROR},
+ {'substr': r'''stdin: not in gzip format''', 'level': ERROR},
+ {'substr': r'''Cannot exec: No such file or directory''', 'level': ERROR},
+ {'substr': r''': Error is not recoverable: exiting now''', 'level': ERROR},
+]
+
+ADBErrorList = BaseErrorList + [
+ {'substr': r'''INSTALL_FAILED_''', 'level': ERROR},
+ {'substr': r'''Android Debug Bridge version''', 'level': ERROR},
+ {'substr': r'''error: protocol fault''', 'level': ERROR},
+ {'substr': r'''unable to connect to ''', 'level': ERROR},
+]
+
+JarsignerErrorList = [{
+ 'substr': r'''command not found''',
+ 'level': FATAL
+}, {
+ 'substr': r'''jarsigner error: java.lang.RuntimeException: keystore load: Keystore was tampered with, or password was incorrect''',
+ 'level': FATAL,
+ 'explanation': r'''The store passphrase is probably incorrect!''',
+}, {
+ 'regex': re.compile(r'''jarsigner: key associated with .* not a private key'''),
+ 'level': FATAL,
+ 'explanation': r'''The key passphrase is probably incorrect!''',
+}, {
+ 'regex': re.compile(r'''jarsigner error: java.lang.RuntimeException: keystore load: .* .No such file or directory'''),
+ 'level': FATAL,
+ 'explanation': r'''The keystore doesn't exist!''',
+}, {
+ 'substr': r'''jarsigner: unable to open jar file:''',
+ 'level': FATAL,
+ 'explanation': r'''The apk is missing!''',
+}]
+
+ZipErrorList = BaseErrorList + [{
+ 'substr': r'''zip warning:''',
+ 'level': WARNING,
+}, {
+ 'substr': r'''zip error:''',
+ 'level': ERROR,
+}, {
+ 'substr': r'''Cannot open file: it does not appear to be a valid archive''',
+ 'level': ERROR,
+}]
+
+ZipalignErrorList = BaseErrorList + [{
+ 'regex': re.compile(r'''Unable to open .* as a zip archive'''),
+ 'level': ERROR,
+}, {
+ 'regex': re.compile(r'''Output file .* exists'''),
+ 'level': ERROR,
+}, {
+ 'substr': r'''Input and output can't be the same file''',
+ 'level': ERROR,
+}]
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ '''TODO: unit tests.
+ '''
+ pass
diff --git a/testing/mozharness/mozharness/base/log.py b/testing/mozharness/mozharness/base/log.py
new file mode 100755
index 000000000..2c18b50c3
--- /dev/null
+++ b/testing/mozharness/mozharness/base/log.py
@@ -0,0 +1,694 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic logging classes and functionalities for single and multi file logging.
+Capturing console output and providing general logging functionalities.
+
+Attributes:
+ FATAL_LEVEL (int): constant logging level value set based on the logging.CRITICAL
+ value
+ DEBUG (str): mozharness `debug` log name
+ INFO (str): mozharness `info` log name
+    WARNING (str): mozharness `warning` log name
+    ERROR (str): mozharness `error` log name
+ CRITICAL (str): mozharness `critical` log name
+ FATAL (str): mozharness `fatal` log name
+ IGNORE (str): mozharness `ignore` log name
+ LOG_LEVELS (dict): mapping of the mozharness log level names to logging values
+ ROOT_LOGGER (logging.Logger): instance of a logging.Logger class
+
+TODO:
+- network logging support.
+- log rotation config
+"""
+
+from datetime import datetime
+import logging
+import os
+import sys
+import traceback
+
+# Define our own FATAL_LEVEL
+FATAL_LEVEL = logging.CRITICAL + 10
+logging.addLevelName(FATAL_LEVEL, 'FATAL')
+
+# mozharness log levels.
+DEBUG, INFO, WARNING, ERROR, CRITICAL, FATAL, IGNORE = (
+ 'debug', 'info', 'warning', 'error', 'critical', 'fatal', 'ignore')
+
+
+LOG_LEVELS = {
+ DEBUG: logging.DEBUG,
+ INFO: logging.INFO,
+ WARNING: logging.WARNING,
+ ERROR: logging.ERROR,
+ CRITICAL: logging.CRITICAL,
+ FATAL: FATAL_LEVEL
+}
+
+# mozharness root logger
+ROOT_LOGGER = logging.getLogger()
+
+
+# LogMixin {{{1
+class LogMixin(object):
+ """This is a mixin for any object to access similar logging functionality
+
+    The logging functionality described here is especially useful for those
+ objects with self.config and self.log_obj member variables
+ """
+
+ def _log_level_at_least(self, level):
+        """ Check if the current logging level is greater than or equal to level
+
+ Args:
+ level (str): log level name to compare against mozharness log levels
+ names
+
+ Returns:
+            bool: True if the current logging level is greater than or equal to level,
+ False otherwise
+ """
+ log_level = INFO
+ levels = [DEBUG, INFO, WARNING, ERROR, CRITICAL, FATAL]
+ if hasattr(self, 'config'):
+ log_level = self.config.get('log_level', INFO)
+ return levels.index(level) >= levels.index(log_level)
+
+ def _print(self, message, stderr=False):
+ """ prints a message to the sys.stdout or sys.stderr according to the
+ value of the stderr argument.
+
+ Args:
+ message (str): The message to be printed
+ stderr (bool, optional): if True, message will be printed to
+ sys.stderr. Defaults to False.
+
+ Returns:
+ None
+ """
+ if not hasattr(self, 'config') or self.config.get('log_to_console', True):
+ if stderr:
+ print >> sys.stderr, message
+ else:
+ print message
+
+ def log(self, message, level=INFO, exit_code=-1):
+ """ log the message passed to it according to level, exit if level == FATAL
+
+ Args:
+ message (str): message to be logged
+ level (str, optional): logging level of the message. Defaults to INFO
+ exit_code (int, optional): exit code to log before the scripts calls
+ SystemExit.
+
+ Returns:
+ None
+ """
+ if self.log_obj:
+ return self.log_obj.log_message(
+ message, level=level,
+ exit_code=exit_code,
+ post_fatal_callback=self._post_fatal,
+ )
+ if level == INFO:
+ if self._log_level_at_least(level):
+ self._print(message)
+ elif level == DEBUG:
+ if self._log_level_at_least(level):
+ self._print('DEBUG: %s' % message)
+ elif level in (WARNING, ERROR, CRITICAL):
+ if self._log_level_at_least(level):
+ self._print("%s: %s" % (level.upper(), message), stderr=True)
+ elif level == FATAL:
+ if self._log_level_at_least(level):
+ self._print("FATAL: %s" % message, stderr=True)
+ raise SystemExit(exit_code)
+
+ def worst_level(self, target_level, existing_level, levels=None):
+ """Compare target_level with existing_level according to levels values
+ and return the worst among them.
+
+ Args:
+ target_level (str): minimum logging level to which the current object
+ should be set
+ existing_level (str): current logging level
+ levels (list(str), optional): list of logging levels names to compare
+ target_level and existing_level against.
+ Defaults to mozharness log level
+ list sorted from most to less critical.
+
+ Returns:
+ str: the logging level that is closest to the first value in levels,
+ i.e. levels[0]
+ """
+ if not levels:
+ levels = [FATAL, CRITICAL, ERROR, WARNING, INFO, DEBUG, IGNORE]
+ if target_level not in levels:
+ self.fatal("'%s' not in %s'." % (target_level, levels))
+ for l in levels:
+ if l in (target_level, existing_level):
+ return l
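+
+ # Example (with the default ordering, FATAL is "worst", IGNORE "best"):
+ #
+ #     self.worst_level(WARNING, ERROR)  # -> ERROR (already worse)
+ #     self.worst_level(ERROR, WARNING)  # -> ERROR (upgraded)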
+
+ # Copying Bear's dumpException():
+ # https://hg.mozilla.org/build/tools/annotate/1485f23c38e0/sut_tools/sut_lib.py#l23
+ def exception(self, message=None, level=ERROR):
+ """ log an exception message base on the log level passed to it.
+
+ This function fetches the information of the current exception being handled and
+ adds it to the message argument.
+
+ Args:
+ message (str, optional): message to be printed at the beginning of the log.
+ Defaults to None, which is treated as an empty string.
+ level (str, optional): log level to use for the logging. Defaults to ERROR
+
+ Returns:
+ None
+ """
+ tb_type, tb_value, tb_traceback = sys.exc_info()
+ if message is None:
+ message = ""
+ else:
+ message = "%s\n" % message
+ for s in traceback.format_exception(tb_type, tb_value, tb_traceback):
+ message += "%s\n" % s
+ # Log at the end, as a fatal will attempt to exit after the 1st line.
+ self.log(message, level=level)
+
+ def debug(self, message):
+ """ calls the log method with DEBUG as logging level
+
+ Args:
+ message (str): message to log
+ """
+ self.log(message, level=DEBUG)
+
+ def info(self, message):
+ """ calls the log method with INFO as logging level
+
+ Args:
+ message (str): message to log
+ """
+ self.log(message, level=INFO)
+
+ def warning(self, message):
+ """ calls the log method with WARNING as logging level
+
+ Args:
+ message (str): message to log
+ """
+ self.log(message, level=WARNING)
+
+ def error(self, message):
+ """ calls the log method with ERROR as logging level
+
+ Args:
+ message (str): message to log
+ """
+ self.log(message, level=ERROR)
+
+ def critical(self, message):
+ """ calls the log method with CRITICAL as logging level
+
+ Args:
+ message (str): message to log
+ """
+ self.log(message, level=CRITICAL)
+
+ def fatal(self, message, exit_code=-1):
+ """ calls the log method with FATAL as logging level
+
+ Args:
+ message (str): message to log
+ exit_code (int, optional): exit code to use for the SystemExit
+ exception to be raised. Defaults to -1.
+ """
+ self.log(message, level=FATAL, exit_code=exit_code)
+
+ def _post_fatal(self, message=None, exit_code=None):
+ """ Sometimes you want to create a report or cleanup
+ or notify on fatal(); override this method to do so.
+
+ Please don't use this for anything significantly long-running.
+
+ Args:
+ message (str, optional): message to report. Defaults to None.
+ exit_code (int, optional): exit code to use for the SystemExit
+ exception to be raised. Defaults to None.
+ """
+ pass
+
+
+# OutputParser {{{1
+class OutputParser(LogMixin):
+ """ Helper object to parse command output.
+
+ This will buffer output if needed, so we can go back and mark
+ [(linenum - 10) : linenum+10] as errors if need be, without having to
+ get all the output first.
+
+ linenum+10 will be easy; we can set self.num_post_context_lines to 10
+ and decrement it as we mark each subsequent line at error level X or
+ above.
+
+ linenum-10 will be trickier. We'll not only need to save the line
+ itself, but also the level that we've set for that line previously,
+ whether by matching on that line, or by a previous line's context.
+ We should only log that line if all output has ended (self.finish() ?);
+ otherwise store a list of dictionaries in self.context_buffer that is
+ buffered up to self.num_pre_context_lines (set to the largest
+ pre-context-line setting in error_list.)
+ """
+
+ def __init__(self, config=None, log_obj=None, error_list=None, log_output=True, **kwargs):
+ """Initialization method for the OutputParser class
+
+ Args:
+ config (dict, optional): dictionary containing values such as `log_level`
+ or `log_to_console`. Defaults to `None`.
+ log_obj (BaseLogger, optional): instance of the BaseLogger class. Defaults
+ to `None`.
+ error_list (list, optional): list of errors to look for. Defaults to
+ `None`.
+ log_output (boolean, optional): flag for deciding if the commands
+ output should be logged or not.
+ Defaults to `True`.
+ """
+ self.config = config
+ self.log_obj = log_obj
+ self.error_list = error_list or []
+ self.log_output = log_output
+ self.num_errors = 0
+ self.num_warnings = 0
+ # TODO context_lines.
+ # Not in use yet, but will be based off error_list.
+ self.context_buffer = []
+ self.num_pre_context_lines = 0
+ self.num_post_context_lines = 0
+ self.worst_log_level = INFO
+
+ def parse_single_line(self, line):
+ """ parse a console output line and check if it matches one in `error_list`,
+ if so then log it according to `log_output`.
+
+ Args:
+ line (str): command line output to parse.
+ """
+ for error_check in self.error_list:
+ # TODO buffer for context_lines.
+ match = False
+ if 'substr' in error_check:
+ if error_check['substr'] in line:
+ match = True
+ elif 'regex' in error_check:
+ if error_check['regex'].search(line):
+ match = True
+ else:
+ self.warning("error_list: 'substr' and 'regex' not in %s" %
+ error_check)
+ if match:
+ log_level = error_check.get('level', INFO)
+ if self.log_output:
+ message = ' %s' % line
+ if error_check.get('explanation'):
+ message += '\n %s' % error_check['explanation']
+ if error_check.get('summary'):
+ self.add_summary(message, level=log_level)
+ else:
+ self.log(message, level=log_level)
+ if log_level in (ERROR, CRITICAL, FATAL):
+ self.num_errors += 1
+ if log_level == WARNING:
+ self.num_warnings += 1
+ self.worst_log_level = self.worst_level(log_level,
+ self.worst_log_level)
+ break
+ else:
+ if self.log_output:
+ self.info(' %s' % line)
+
+ def add_lines(self, output):
+ """ process a string or list of strings, decode them to utf-8,strip
+ them of any trailing whitespaces and parse them using `parse_single_line`
+
+ strings consisting only of whitespaces are ignored.
+
+ Args:
+ output (str | list): string or list of string to parse
+ """
+
+ if isinstance(output, basestring):
+ output = [output]
+ for line in output:
+ if not line or line.isspace():
+ continue
+ line = line.decode("utf-8", 'replace').rstrip()
+ self.parse_single_line(line)
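+
+# Usage sketch (illustrative only; the error list below is hypothetical):
+#
+#     import re
+#     error_list = [
+#         {'substr': 'FAILED', 'level': ERROR},
+#         {'regex': re.compile(r'^TimeoutException'), 'level': WARNING},
+#     ]
+#     parser = OutputParser(config={}, error_list=error_list)
+#     parser.add_lines(['all good', 'step FAILED badly'])
+#     # parser.num_errors == 1, parser.worst_log_level == ERROR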
+
+
+# BaseLogger {{{1
+class BaseLogger(object):
+ """ Base class in charge of logging handling logic such as creating logging
+ files, dirs, attaching to the console output and managing its output.
+
+ Attributes:
+ LEVELS (dict): flat copy of the `LOG_LEVELS` attribute of the `log` module.
+
+ TODO: status? There may be a status object or status capability in
+ either logging or config that allows you to count the number of
+ error, critical and fatal messages for us to count up at the end (aiming
+ for 0).
+ """
+ LEVELS = LOG_LEVELS
+
+ def __init__(
+ self, log_level=INFO,
+ log_format='%(message)s',
+ log_date_format='%H:%M:%S',
+ log_name='test',
+ log_to_console=True,
+ log_dir='.',
+ log_to_raw=False,
+ logger_name='',
+ append_to_log=False,
+ ):
+ """ BaseLogger constructor
+
+ Args:
+ log_level (str, optional): mozharness log level name. Defaults to INFO.
+ log_format (str, optional): message format string to instantiate a
+ `logging.Formatter`. Defaults to '%(message)s'
+ log_date_format (str, optional): date format string to instantiate a
+ `logging.Formatter`. Defaults to '%H:%M:%S'
+ log_name (str, optional): name to use for the log files to be created.
+ Defaults to 'test'
+ log_to_console (bool, optional): set to True in order to attach a
+ console Handler to the current
+ `Logger` instance. Defaults to True.
+ log_dir (str, optional): directory location to store the log files.
+ Defaults to '.', i.e. current working directory.
+ log_to_raw (bool, optional): set to True in order to create a *raw.log
+ file. Defaults to False.
+ logger_name (str, optional): currently unused parameter. According
+ to the code comments, it could be useful
+ if we were to have multiple logging
+ objects that don't trample each other.
+ append_to_log (bool, optional): set to True if the logging content should
+ be appended to old logging files. Defaults to False
+ """
+
+ self.log_format = log_format
+ self.log_date_format = log_date_format
+ self.log_to_console = log_to_console
+ self.log_to_raw = log_to_raw
+ self.log_level = log_level
+ self.log_name = log_name
+ self.log_dir = log_dir
+ self.append_to_log = append_to_log
+
+ # Not sure what I'm going to use this for; useless unless we
+ # can have multiple logging objects that don't trample each other
+ self.logger_name = logger_name
+
+ self.all_handlers = []
+ self.log_files = {}
+
+ self.create_log_dir()
+
+ def create_log_dir(self):
+ """ create a logging directory if it doesn't exits. If there is a file with
+ same name as the future logging directory it will be deleted.
+ """
+
+ if os.path.exists(self.log_dir):
+ if not os.path.isdir(self.log_dir):
+ os.remove(self.log_dir)
+ if not os.path.exists(self.log_dir):
+ os.makedirs(self.log_dir)
+ self.abs_log_dir = os.path.abspath(self.log_dir)
+
+ def init_message(self, name=None):
+ """ log an init message stating the name passed to it, the current date
+ and time and, the current working directory.
+
+ Args:
+ name (str, optional): name to use for the init log message. Defaults to
+ the current instance class name.
+ """
+
+ if not name:
+ name = self.__class__.__name__
+ self.log_message("%s online at %s in %s" %
+ (name, datetime.now().strftime("%Y%m%d %H:%M:%S"),
+ os.getcwd()))
+
+ def get_logger_level(self, level=None):
+ """ translate the level name passed to it and return its numeric value
+ according to `LEVELS` values.
+
+ Args:
+ level (str, optional): level name to be translated. Defaults to the current
+ instance `log_level`.
+
+ Returns:
+ int: numeric value of the log level name passed to it or 0 (NOTSET) if the
+ name doesn't exist
+ """
+
+ if not level:
+ level = self.log_level
+ return self.LEVELS.get(level, logging.NOTSET)
+
+ def get_log_formatter(self, log_format=None, date_format=None):
+ """ create a `logging.Formatter` base on the log and date format.
+
+ Args:
+ log_format (str, optional): log format to use for the Formatter constructor.
+ Defaults to the current instance log format.
+ date_format (str, optional): date format to use for the Formatter constructor.
+ Defaults to the current instance date format.
+
+ Returns:
+ logging.Formatter: instance created based on the passed arguments
+ """
+
+ if not log_format:
+ log_format = self.log_format
+ if not date_format:
+ date_format = self.log_date_format
+ return logging.Formatter(log_format, date_format)
+
+ def new_logger(self):
+ """ Create a new logger based on the ROOT_LOGGER instance. By default there are no handlers.
+ The new logger becomes a member variable of the current instance as `self.logger`.
+ """
+
+ self.logger = ROOT_LOGGER
+ self.logger.setLevel(self.get_logger_level())
+ self._clear_handlers()
+ if self.log_to_console:
+ self.add_console_handler()
+ if self.log_to_raw:
+ self.log_files['raw'] = '%s_raw.log' % self.log_name
+ self.add_file_handler(os.path.join(self.abs_log_dir,
+ self.log_files['raw']),
+ log_format='%(message)s')
+
+ def _clear_handlers(self):
+ """ remove all handlers stored in `self.all_handlers`.
+
+ To prevent dups -- logging will preserve Handlers across
+ objects :(
+ """
+ attrs = dir(self)
+ if 'all_handlers' in attrs and 'logger' in attrs:
+ for handler in self.all_handlers:
+ self.logger.removeHandler(handler)
+ self.all_handlers = []
+
+ def __del__(self):
+ """ BaseLogger class destructor; shutdown, flush and remove all handlers"""
+ logging.shutdown()
+ self._clear_handlers()
+
+ def add_console_handler(self, log_level=None, log_format=None,
+ date_format=None):
+ """ create a `logging.StreamHandler` using `sys.stderr` for logging the console
+ output and add it to the `all_handlers` member variable
+
+ Args:
+ log_level (str, optional): unused argument, kept for signature
+ compatibility. Defaults to None.
+ log_format (str, optional): format used for the Formatter attached to the
+ StreamHandler. Defaults to None.
+ date_format (str, optional): format used for the Formatter attached to the
+ StreamHandler. Defaults to None.
+ """
+
+ console_handler = logging.StreamHandler()
+ console_handler.setFormatter(self.get_log_formatter(log_format=log_format,
+ date_format=date_format))
+ self.logger.addHandler(console_handler)
+ self.all_handlers.append(console_handler)
+
+ def add_file_handler(self, log_path, log_level=None, log_format=None,
+ date_format=None):
+ """ create a `logging.FileHandler` base on the path, log and date format
+ and add it to the `all_handlers` member variable.
+
+ Args:
+ log_path (str): filepath to use for the `FileHandler`.
+ log_level (str, optional): mozharness log level name used to set the
+ handler's level. Defaults to None (the instance log level).
+ log_format (str, optional): log format to use for the Formatter constructor.
+ Defaults to the current instance log format.
+ date_format (str, optional): date format to use for the Formatter constructor.
+ Defaults to the current instance date format.
+ """
+
+ if not self.append_to_log and os.path.exists(log_path):
+ os.remove(log_path)
+ file_handler = logging.FileHandler(log_path)
+ file_handler.setLevel(self.get_logger_level(log_level))
+ file_handler.setFormatter(self.get_log_formatter(log_format=log_format,
+ date_format=date_format))
+ self.logger.addHandler(file_handler)
+ self.all_handlers.append(file_handler)
+
+ def log_message(self, message, level=INFO, exit_code=-1, post_fatal_callback=None):
+ """ Generic log method.
+ There should be more options here -- do or don't split by line,
+ use os.linesep instead of assuming \n, be able to pass in log level
+ by name or number.
+
+ Adding the IGNORE special level for runCommand.
+
+ Args:
+ message (str): message to log using the current `logger`
+ level (str, optional): log level of the message. Defaults to INFO.
+ exit_code (int, optional): exit code to use in case of a FATAL level is used.
+ Defaults to -1.
+ post_fatal_callback (function, optional): function to call back in
+ case of a fatal log level. Defaults to None.
+ """
+
+ if level == IGNORE:
+ return
+ for line in message.splitlines():
+ self.logger.log(self.get_logger_level(level), line)
+ if level == FATAL:
+ if callable(post_fatal_callback):
+ self.logger.log(FATAL_LEVEL, "Running post_fatal callback...")
+ post_fatal_callback(message=message, exit_code=exit_code)
+ self.logger.log(FATAL_LEVEL, 'Exiting %d' % exit_code)
+ raise SystemExit(exit_code)
+
+
+# SimpleFileLogger {{{1
+class SimpleFileLogger(BaseLogger):
+ """ Subclass of the BaseLogger.
+
+ Create one logFile. Possibly also output to the terminal and a raw log
+ (no prepending of level or date)
+ """
+
+ def __init__(self,
+ log_format='%(asctime)s %(levelname)8s - %(message)s',
+ logger_name='Simple', log_dir='logs', **kwargs):
+ """ SimpleFileLogger constructor. Calls its superclass constructor,
+ creates a new logger instance and logs an init message.
+
+ Args:
+ log_format (str, optional): message format string to instantiate a
+ `logging.Formatter`. Defaults to
+ '%(asctime)s %(levelname)8s - %(message)s'
+ logger_name (str, optional): name to use for the underlying logger.
+ Defaults to 'Simple'
+ log_dir (str, optional): directory location to store the log files.
+ Defaults to 'logs'
+ **kwargs: Arbitrary keyword arguments passed to the BaseLogger constructor
+ """
+
+ BaseLogger.__init__(self, logger_name=logger_name, log_format=log_format,
+ log_dir=log_dir, **kwargs)
+ self.new_logger()
+ self.init_message()
+
+ def new_logger(self):
+ """ calls the BaseLogger.new_logger method and adds a file handler to it."""
+
+ BaseLogger.new_logger(self)
+ self.log_path = os.path.join(self.abs_log_dir, '%s.log' % self.log_name)
+ self.log_files['default'] = self.log_path
+ self.add_file_handler(self.log_path)
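+
+ # Minimal usage sketch (illustrative): this writes ./logs/build.log and
+ # also echoes each message to the console.
+ #
+ #     log_obj = SimpleFileLogger(log_name='build')
+ #     log_obj.log_message('downloading artifacts', level=INFO)
+ #     log_obj.log_message('disk full', level=ERROR)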
+
+
+# MultiFileLogger {{{1
+class MultiFileLogger(BaseLogger):
+ """Subclass of the BaseLogger class. Create a log per log level in log_dir.
+ Possibly also output to the terminal and a raw log (no prepending of level or date)
+ """
+
+ def __init__(self, logger_name='Multi',
+ log_format='%(asctime)s %(levelname)8s - %(message)s',
+ log_dir='logs', log_to_raw=True, **kwargs):
+ """ MultiFileLogger constructor. Calls its superclass constructor,
+ creates a new logger instance and logs an init message.
+
+ Args:
+ log_format (str, optional): message format string to instantiate a
+ `logging.Formatter`. Defaults to
+ '%(asctime)s %(levelname)8s - %(message)s'
+ logger_name (str, optional): name to use for the underlying logger.
+ Defaults to 'Multi'
+ log_dir (str, optional): directory location to store the log files.
+ Defaults to 'logs'
+ log_to_raw (bool, optional): set to True in order to create a *raw.log
+ file. Defaults to True.
+ **kwargs: Arbitrary keyword arguments passed to the BaseLogger constructor
+ """
+
+ BaseLogger.__init__(self, logger_name=logger_name,
+ log_format=log_format,
+ log_to_raw=log_to_raw, log_dir=log_dir,
+ **kwargs)
+
+ self.new_logger()
+ self.init_message()
+
+ def new_logger(self):
+ """ calls the BaseLogger.new_logger method and adds a file handler per
+ logging level in the `LEVELS` class attribute.
+ """
+
+ BaseLogger.new_logger(self)
+ min_logger_level = self.get_logger_level(self.log_level)
+ for level in self.LEVELS.keys():
+ if self.get_logger_level(level) >= min_logger_level:
+ self.log_files[level] = '%s_%s.log' % (self.log_name,
+ level)
+ self.add_file_handler(os.path.join(self.abs_log_dir,
+ self.log_files[level]),
+ log_level=level)
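+
+ # Example (illustrative): with log_level=DEBUG and log_name='test', the
+ # loop above creates one file per level at or above the minimum, e.g.
+ # test_debug.log, test_info.log, test_warning.log, test_error.log,
+ # test_critical.log and test_fatal.log, plus test_raw.log via log_to_raw.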
+
+
+def numeric_log_level(level):
+ """Converts a mozharness log level (string) to the corresponding logger
+ level (number). This function makes it possible to set the log level
+ in functions that do not inherit from LogMixin.
+
+ Args:
+ level (str): log level name to convert.
+
+ Returns:
+ int: numeric value of the log level name.
+ """
+ return LOG_LEVELS[level]
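+
+# Example: numeric_log_level('warning') returns logging.WARNING (30), and
+# numeric_log_level('fatal') returns FATAL_LEVEL (logging.CRITICAL + 10).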
+
+# __main__ {{{1
+if __name__ == '__main__':
+ """ Useless comparison, due to the `pass` keyword on its body"""
+ pass
diff --git a/testing/mozharness/mozharness/base/parallel.py b/testing/mozharness/mozharness/base/parallel.py
new file mode 100755
index 000000000..b20b9c97c
--- /dev/null
+++ b/testing/mozharness/mozharness/base/parallel.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic ways to parallelize jobs.
+"""
+
+
+# ChunkingMixin {{{1
+class ChunkingMixin(object):
+ """Generic signing helper methods.
+ """
+ def query_chunked_list(self, possible_list, this_chunk, total_chunks,
+ sort=False):
+ """Split a list of items into a certain number of chunks and
+ return the subset that will occur in this chunk.
+
+ Ported from build.l10n.getLocalesForChunk in build/tools.
+ """
+ if sort:
+ possible_list = sorted(possible_list)
+ else:
+ # Copy to prevent altering
+ possible_list = possible_list[:]
+ length = len(possible_list)
+ for c in range(1, total_chunks + 1):
+ n = length / total_chunks
+ # If the total number of items isn't evenly divisible by the
+ # number of chunks, we need to append one more onto some chunks
+ if c <= (length % total_chunks):
+ n += 1
+ if c == this_chunk:
+ return possible_list[0:n]
+ del possible_list[0:n]
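+
+# Worked example (illustrative): splitting 10 items into 3 chunks gives the
+# first chunk one extra item, since 10 % 3 == 1:
+#
+#     self.query_chunked_list(range(10), 1, 3)  # -> [0, 1, 2, 3]
+#     self.query_chunked_list(range(10), 2, 3)  # -> [4, 5, 6]
+#     self.query_chunked_list(range(10), 3, 3)  # -> [7, 8, 9]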
diff --git a/testing/mozharness/mozharness/base/python.py b/testing/mozharness/mozharness/base/python.py
new file mode 100644
index 000000000..cb5bfbc46
--- /dev/null
+++ b/testing/mozharness/mozharness/base/python.py
@@ -0,0 +1,743 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+'''Python usage, esp. virtualenv.
+'''
+
+import errno
+import os
+import subprocess
+import sys
+import json
+import socket
+import traceback
+import urlparse
+
+import mozharness
+from mozharness.base.script import (
+ PostScriptAction,
+ PostScriptRun,
+ PreScriptAction,
+ ScriptMixin,
+)
+from mozharness.base.errors import VirtualenvErrorList
+from mozharness.base.log import WARNING, FATAL
+from mozharness.mozilla.proxxy import Proxxy
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+def get_tlsv1_post():
+ # Monkeypatch to work around SSL errors in non-bleeding-edge Python.
+ # Taken from https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
+ import requests
+ from requests.packages.urllib3.poolmanager import PoolManager
+ import ssl
+
+ class TLSV1Adapter(requests.adapters.HTTPAdapter):
+ def init_poolmanager(self, connections, maxsize, block=False):
+ self.poolmanager = PoolManager(num_pools=connections,
+ maxsize=maxsize,
+ block=block,
+ ssl_version=ssl.PROTOCOL_TLSv1)
+ s = requests.Session()
+ s.mount('https://', TLSV1Adapter())
+ return s.post
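+
+# Usage sketch (illustrative; the URL is hypothetical):
+#
+#     tlsv1_post = get_tlsv1_post()
+#     response = tlsv1_post('https://example.com/api', data={'k': 'v'})
+#
+# The returned callable behaves like requests.post(), except that every
+# HTTPS connection is pinned to TLSv1.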
+
+# Virtualenv {{{1
+virtualenv_config_options = [
+ [["--virtualenv-path"], {
+ "action": "store",
+ "dest": "virtualenv_path",
+ "default": "venv",
+ "help": "Specify the path to the virtualenv top level directory"
+ }],
+ [["--find-links"], {
+ "action": "extend",
+ "dest": "find_links",
+ "help": "URL to look for packages at"
+ }],
+ [["--pip-index"], {
+ "action": "store_true",
+ "default": True,
+ "dest": "pip_index",
+ "help": "Use pip indexes (default)"
+ }],
+ [["--no-pip-index"], {
+ "action": "store_false",
+ "dest": "pip_index",
+ "help": "Don't use pip indexes"
+ }],
+]
+
+
+class VirtualenvMixin(object):
+ '''BaseScript mixin, designed to create and use virtualenvs.
+
+ Config items:
+ * virtualenv_path points to the virtualenv location on disk.
+ * virtualenv_modules lists the module names.
+ * MODULE_url list points to the module URLs (optional)
+ Requires virtualenv to be in PATH.
+ Depends on ScriptMixin
+ '''
+ python_paths = {}
+ site_packages_path = None
+
+ def __init__(self, *args, **kwargs):
+ self._virtualenv_modules = []
+ super(VirtualenvMixin, self).__init__(*args, **kwargs)
+
+ def register_virtualenv_module(self, name=None, url=None, method=None,
+ requirements=None, optional=False,
+ two_pass=False, editable=False):
+ """Register a module to be installed with the virtualenv.
+
+ This method can be called up until create_virtualenv() to register
+ modules that should be installed in the virtualenv.
+
+ See the documentation for install_module for how the arguments are
+ applied.
+ """
+ self._virtualenv_modules.append((name, url, method, requirements,
+ optional, two_pass, editable))
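+
+ # Usage sketch (illustrative; the version pin mirrors the one used by
+ # ResourceMonitoringMixin further down in this module):
+ #
+ #     self.register_virtualenv_module('psutil>=3.1.1', method='pip',
+ #                                     optional=True)
+ #
+ # Modules registered this way are installed when create_virtualenv()
+ # eventually runs.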
+
+ def query_virtualenv_path(self):
+ """Determine the absolute path to the virtualenv."""
+ dirs = self.query_abs_dirs()
+
+ if 'abs_virtualenv_dir' in dirs:
+ return dirs['abs_virtualenv_dir']
+
+ p = self.config['virtualenv_path']
+ if not p:
+ self.fatal('virtualenv_path config option not set; '
+ 'this should never happen')
+
+ if os.path.isabs(p):
+ return p
+ else:
+ return os.path.join(dirs['abs_work_dir'], p)
+
+ def query_python_path(self, binary="python"):
+ """Return the path of a binary inside the virtualenv, if
+ c['virtualenv_path'] is set; otherwise return the binary name.
+ Otherwise return None
+ """
+ if binary not in self.python_paths:
+ bin_dir = 'bin'
+ if self._is_windows():
+ bin_dir = 'Scripts'
+ virtualenv_path = self.query_virtualenv_path()
+ self.python_paths[binary] = os.path.abspath(os.path.join(virtualenv_path, bin_dir, binary))
+
+ return self.python_paths[binary]
+
+ def query_python_site_packages_path(self):
+ if self.site_packages_path:
+ return self.site_packages_path
+ python = self.query_python_path()
+ self.site_packages_path = self.get_output_from_command(
+ [python, '-c',
+ 'from distutils.sysconfig import get_python_lib; ' +
+ 'print(get_python_lib())'])
+ return self.site_packages_path
+
+ def package_versions(self, pip_freeze_output=None, error_level=WARNING, log_output=False):
+ """
+ reads packages from `pip freeze` output and returns a dict of
+ {package_name: 'version'}
+ """
+ packages = {}
+
+ if pip_freeze_output is None:
+ # get the output from `pip freeze`
+ pip = self.query_python_path("pip")
+ if not pip:
+ self.log("package_versions: Program pip not in path", level=error_level)
+ return {}
+ pip_freeze_output = self.get_output_from_command([pip, "freeze"], silent=True, ignore_errors=True)
+ if not isinstance(pip_freeze_output, basestring):
+ self.fatal("package_versions: Error encountered running `pip freeze`: %s" % pip_freeze_output)
+
+ for line in pip_freeze_output.splitlines():
+ # parse the output into package, version
+ line = line.strip()
+ if not line:
+ # whitespace
+ continue
+ if line.startswith('-'):
+ # not a package, probably like '-e http://example.com/path#egg=package-dev'
+ continue
+ if '==' not in line:
+ self.fatal("pip_freeze_packages: Unrecognized output line: %s" % line)
+ package, version = line.split('==', 1)
+ packages[package] = version
+
+ if log_output:
+ self.info("Current package versions:")
+ for package in sorted(packages):
+ self.info(" %s == %s" % (package, packages[package]))
+
+ return packages
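+
+ # Example (illustrative): given `pip freeze` output such as
+ #
+ #     mozinfo==0.9
+ #     requests==2.9.1
+ #
+ # this returns {'mozinfo': '0.9', 'requests': '2.9.1'}; editable lines
+ # like '-e git+https://...#egg=foo' are skipped.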
+
+ def is_python_package_installed(self, package_name, error_level=WARNING):
+ """
+ Return whether the package is installed
+ """
+ packages = self.package_versions(error_level=error_level).keys()
+ return package_name.lower() in [package.lower() for package in packages]
+
+ def install_module(self, module=None, module_url=None, install_method=None,
+ requirements=(), optional=False, global_options=[],
+ no_deps=False, editable=False):
+ """
+ Install module via pip.
+
+ module_url can be a url to a python package tarball, a path to
+ a directory containing a setup.py (absolute or relative to work_dir)
+ or None, in which case it will default to the module name.
+
+ requirements is a list of pip requirements files. If specified, these
+ will be combined with the module_url (if any), like so:
+
+ pip install -r requirements1.txt -r requirements2.txt module_url
+ """
+ c = self.config
+ dirs = self.query_abs_dirs()
+ env = self.query_env()
+ venv_path = self.query_virtualenv_path()
+ self.info("Installing %s into virtualenv %s" % (module, venv_path))
+ if not module_url:
+ module_url = module
+ if install_method in (None, 'pip'):
+ if not module_url and not requirements:
+ self.fatal("Must specify module and/or requirements")
+ pip = self.query_python_path("pip")
+ if c.get("verbose_pip"):
+ command = [pip, "-v", "install"]
+ else:
+ command = [pip, "install"]
+ if no_deps:
+ command += ["--no-deps"]
+ # To avoid timeouts with our pypi server, increase default timeout:
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=1007230#c802
+ command += ['--timeout', str(c.get('pip_timeout', 120))]
+ for requirement in requirements:
+ command += ["-r", requirement]
+ if c.get('find_links') and not c["pip_index"]:
+ command += ['--no-index']
+ for opt in global_options:
+ command += ["--global-option", opt]
+ elif install_method == 'easy_install':
+ if not module:
+ self.fatal("module parameter required with install_method='easy_install'")
+ if requirements:
+ # Install pip requirements files separately, since they're
+ # not understood by easy_install.
+ self.install_module(requirements=requirements,
+ install_method='pip')
+ # Allow easy_install to be overridden by
+ # self.config['exes']['easy_install']
+ default = 'easy_install'
+ command = self.query_exe('easy_install', default=default, return_type="list")
+ else:
+ self.fatal("install_module() doesn't understand an install_method of %s!" % install_method)
+
+ # Add --find-links pages to look at. Add --trusted-host automatically if
+ # the host isn't secure. This allows modern versions of pip to connect
+ # without requiring an override.
+ proxxy = Proxxy(self.config, self.log_obj)
+ trusted_hosts = set()
+ for link in proxxy.get_proxies_and_urls(c.get('find_links', [])):
+ parsed = urlparse.urlparse(link)
+
+ try:
+ socket.gethostbyname(parsed.hostname)
+ except socket.gaierror as e:
+ self.info('error resolving %s (ignoring): %s' %
+ (parsed.hostname, e.message))
+ continue
+
+ command.extend(["--find-links", link])
+ if parsed.scheme != 'https':
+ trusted_hosts.add(parsed.hostname)
+
+ if install_method != 'easy_install':
+ for host in sorted(trusted_hosts):
+ command.extend(['--trusted-host', host])
+
+ # module_url can be None if only specifying requirements files
+ if module_url:
+ if editable:
+ if install_method in (None, 'pip'):
+ command += ['-e']
+ else:
+ self.fatal("editable installs not supported for install_method %s" % install_method)
+ command += [module_url]
+
+ # If we're only installing a single requirements file, use
+ # the file's directory as cwd, so relative paths work correctly.
+ cwd = dirs['abs_work_dir']
+ if not module and len(requirements) == 1:
+ cwd = os.path.dirname(requirements[0])
+
+ quoted_command = subprocess.list2cmdline(command)
+ # Allow for errors while building modules, but require a
+ # return status of 0.
+ self.retry(
+ self.run_command,
+ # None will cause default value to be used
+ attempts=1 if optional else None,
+ good_statuses=(0,),
+ error_level=WARNING if optional else FATAL,
+ error_message='Could not install python package: ' + quoted_command + ' failed after %(attempts)d tries!',
+ args=[command, ],
+ kwargs={
+ 'error_list': VirtualenvErrorList,
+ 'cwd': cwd,
+ 'env': env,
+ # WARNING only since retry will raise final FATAL if all
+ # retry attempts are unsuccessful - and we only want
+ # an ERROR or FATAL if *no* retry attempt works
+ 'error_level': WARNING,
+ }
+ )
+
+ def create_virtualenv(self, modules=(), requirements=()):
+ """
+ Create a python virtualenv.
+
+ The vendored copy of virtualenv (under external_tools) is always used,
+ so the c['virtualenv'] / c['exes']['virtualenv'] options are not
+ consulted here.
+
+ c['virtualenv_python_dll'] is an optional config item that works
+ around an old windows virtualenv bug.
+
+ virtualenv_modules can be a list of module names to install, e.g.
+
+ virtualenv_modules = ['module1', 'module2']
+
+ or it can be a heterogeneous list of modules names and dicts that
+ define a module by its name, url-or-path, and a list of its global
+ options.
+
+ virtualenv_modules = [
+ {
+ 'name': 'module1',
+ 'url': None,
+ 'global_options': ['--opt', '--without-gcc']
+ },
+ {
+ 'name': 'module2',
+ 'url': 'http://url/to/package',
+ 'global_options': ['--use-clang']
+ },
+ {
+ 'name': 'module3',
+ 'url': os.path.join('path', 'to', 'setup_py', 'dir')
+ 'global_options': []
+ },
+ 'module4'
+ ]
+
+ virtualenv_requirements is an optional list of pip requirements files to
+ use when invoking pip, e.g.,
+
+ virtualenv_requirements = [
+ '/path/to/requirements1.txt',
+ '/path/to/requirements2.txt'
+ ]
+ """
+ c = self.config
+ dirs = self.query_abs_dirs()
+ venv_path = self.query_virtualenv_path()
+ self.info("Creating virtualenv %s" % venv_path)
+
+ # Always use the virtualenv that is vendored since that is deterministic.
+ # TODO Bug 1408051 - Use the copy of virtualenv under
+ # third_party/python/virtualenv once everything is off buildbot
+ virtualenv = [
+ sys.executable,
+ os.path.join(external_tools_path, 'virtualenv', 'virtualenv.py'),
+ ]
+ virtualenv_options = c.get('virtualenv_options', [])
+ # Don't create symlinks. If we don't do this, permissions issues may
+ # hinder virtualenv creation or operation.
+ virtualenv_options.append('--always-copy')
+
+ if os.path.exists(self.query_python_path()):
+ self.info("Virtualenv %s appears to already exist; skipping virtualenv creation." % self.query_python_path())
+ else:
+ self.mkdir_p(dirs['abs_work_dir'])
+ self.run_command(virtualenv + virtualenv_options + [venv_path],
+ cwd=dirs['abs_work_dir'],
+ error_list=VirtualenvErrorList,
+ partial_env={'VIRTUALENV_NO_DOWNLOAD': "1"},
+ halt_on_failure=True)
+
+ if not modules:
+ modules = c.get('virtualenv_modules', [])
+ if not requirements:
+ requirements = c.get('virtualenv_requirements', [])
+ if not modules and requirements:
+ self.install_module(requirements=requirements,
+ install_method='pip')
+ for module in modules:
+ module_url = module
+ global_options = []
+ if isinstance(module, dict):
+ if module.get('name', None):
+ module_name = module['name']
+ else:
+ self.fatal("Can't install module without module name: %s" %
+ str(module))
+ module_url = module.get('url', None)
+ global_options = module.get('global_options', [])
+ else:
+ module_url = self.config.get('%s_url' % module, module_url)
+ module_name = module
+ install_method = 'pip'
+ if module_name in ('pywin32',):
+ install_method = 'easy_install'
+ self.install_module(module=module_name,
+ module_url=module_url,
+ install_method=install_method,
+ requirements=requirements,
+ global_options=global_options)
+
+ for module, url, method, requirements, optional, two_pass, editable in \
+ self._virtualenv_modules:
+ if two_pass:
+ self.install_module(
+ module=module, module_url=url,
+ install_method=method, requirements=requirements or (),
+ optional=optional, no_deps=True, editable=editable
+ )
+ self.install_module(
+ module=module, module_url=url,
+ install_method=method, requirements=requirements or (),
+ optional=optional, editable=editable
+ )
+
+ self.info("Done creating virtualenv %s." % venv_path)
+
+ self.package_versions(log_output=True)
+
+ def activate_virtualenv(self):
+ """Import the virtualenv's packages into this Python interpreter."""
+ bin_dir = os.path.dirname(self.query_python_path())
+ activate = os.path.join(bin_dir, 'activate_this.py')
+ execfile(activate, dict(__file__=activate))
+
+
+# This is (sadly) a mixin for logging methods.
+class PerfherderResourceOptionsMixin(ScriptMixin):
+ def perfherder_resource_options(self):
+ """Obtain a list of extraOptions values to identify the env."""
+ opts = []
+
+ if 'TASKCLUSTER_INSTANCE_TYPE' in os.environ:
+ # Include the instance type so results can be grouped.
+ opts.append('taskcluster-%s' % os.environ['TASKCLUSTER_INSTANCE_TYPE'])
+ else:
+ # We assume !taskcluster => buildbot.
+ instance = 'unknown'
+
+ # Try to load EC2 instance type from metadata file. This file
+ # may not exist in many scenarios (including when inside a chroot).
+ # So treat it as optional.
+ # TODO support Windows.
+ try:
+ # This file should exist on Linux in EC2.
+ with open('/etc/instance_metadata.json', 'rb') as fh:
+ im = json.load(fh)
+ instance = im['aws_instance_type'].encode('ascii')
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ self.info('instance_metadata.json not found; unable to '
+ 'determine instance type')
+ except Exception:
+ self.warning('error reading instance_metadata: %s' %
+ traceback.format_exc())
+
+ opts.append('buildbot-%s' % instance)
+
+ return opts
+
+
+class ResourceMonitoringMixin(PerfherderResourceOptionsMixin):
+ """Provides resource monitoring capabilities to scripts.
+
+ When this class is in the inheritance chain, resource usage stats of the
+ executing script will be recorded.
+
+ This class requires the VirtualenvMixin in order to install a package used
+ for recording resource usage.
+
+ While we would like to record resource usage for the entirety of a script,
+ since we require an external package, we can only record resource usage
+ after that package is installed (as part of creating the virtualenv).
+ That's just the way things have to be.
+ """
+ def __init__(self, *args, **kwargs):
+ super(ResourceMonitoringMixin, self).__init__(*args, **kwargs)
+
+ self.register_virtualenv_module('psutil>=3.1.1', method='pip',
+ optional=True)
+ self.register_virtualenv_module('mozsystemmonitor==0.3',
+ method='pip', optional=True)
+ self.register_virtualenv_module('jsonschema==2.5.1',
+ method='pip')
+ # explicitly install functools32, because some slaves aren't using
+ # a version of pip recent enough to install it automatically with
+ # jsonschema (which depends on it)
+ # https://github.com/Julian/jsonschema/issues/233
+ self.register_virtualenv_module('functools32==3.2.3-2',
+ method='pip')
+ self._resource_monitor = None
+
+ # 2-tuple of (name, options) to assign Perfherder resource monitor
+ # metrics to. This needs to be assigned by a script in order for
+ # Perfherder metrics to be reported.
+ self.resource_monitor_perfherder_id = None
+
+ @PostScriptAction('create-virtualenv')
+ def _start_resource_monitoring(self, action, success=None):
+ self.activate_virtualenv()
+
+ # Resource Monitor requires Python 2.7, however it's currently optional.
+ # Remove when all machines have had their Python version updated (bug 711299).
+ if sys.version_info[:2] < (2, 7):
+ self.warning('Resource monitoring will not be enabled! Python 2.7+ required.')
+ return
+
+ try:
+ from mozsystemmonitor.resourcemonitor import SystemResourceMonitor
+
+ self.info("Starting resource monitoring.")
+ self._resource_monitor = SystemResourceMonitor(poll_interval=1.0)
+ self._resource_monitor.start()
+ except Exception:
+ self.warning("Unable to start resource monitor: %s" %
+ traceback.format_exc())
+
+ @PreScriptAction
+ def _resource_record_pre_action(self, action):
+ # Resource monitor isn't available until after create-virtualenv.
+ if not self._resource_monitor:
+ return
+
+ self._resource_monitor.begin_phase(action)
+
+ @PostScriptAction
+ def _resource_record_post_action(self, action, success=None):
+ # Resource monitor isn't available until after create-virtualenv.
+ if not self._resource_monitor:
+ return
+
+ self._resource_monitor.finish_phase(action)
+
+ @PostScriptRun
+ def _resource_record_post_run(self):
+ if not self._resource_monitor:
+ return
+
+ # This should never raise an exception. This is a workaround until
+ # mozsystemmonitor is fixed. See bug 895388.
+ try:
+ self._resource_monitor.stop()
+ self._log_resource_usage()
+
+ # Upload a JSON file containing the raw resource data.
+ try:
+ upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.exists(upload_dir):
+ os.makedirs(upload_dir)
+ with open(os.path.join(upload_dir, 'resource-usage.json'), 'wb') as fh:
+ json.dump(self._resource_monitor.as_dict(), fh,
+ sort_keys=True, indent=4)
+ except (AttributeError, KeyError):
+ self.exception('could not upload resource usage JSON',
+ level=WARNING)
+
+ except Exception:
+ self.warning("Exception when reporting resource usage: %s" %
+ traceback.format_exc())
+
+ def _log_resource_usage(self):
+ # Delay import because not available until virtualenv is populated.
+ import jsonschema
+
+ rm = self._resource_monitor
+
+ if rm.start_time is None:
+ return
+
+ def resources(phase):
+ cpu_percent = rm.aggregate_cpu_percent(phase=phase, per_cpu=False)
+ cpu_times = rm.aggregate_cpu_times(phase=phase, per_cpu=False)
+ io = rm.aggregate_io(phase=phase)
+
+ swap_in = sum(m.swap.sin for m in rm.measurements)
+ swap_out = sum(m.swap.sout for m in rm.measurements)
+
+ return cpu_percent, cpu_times, io, (swap_in, swap_out)
+
+ def log_usage(prefix, duration, cpu_percent, cpu_times, io):
+ message = '{prefix} - Wall time: {duration:.0f}s; ' \
+ 'CPU: {cpu_percent}; ' \
+ 'Read bytes: {io_read_bytes}; Write bytes: {io_write_bytes}; ' \
+ 'Read time: {io_read_time}; Write time: {io_write_time}'
+
+ # XXX Some test harnesses are complaining about a string being
+ # fed into a 'f' formatter. This will help diagnose the
+ # issue.
+ cpu_percent_str = str(round(cpu_percent)) + '%' if cpu_percent else "Can't collect data"
+
+ try:
+ self.info(
+ message.format(
+ prefix=prefix, duration=duration,
+ cpu_percent=cpu_percent_str, io_read_bytes=io.read_bytes,
+ io_write_bytes=io.write_bytes, io_read_time=io.read_time,
+ io_write_time=io.write_time
+ )
+ )
+
+ except ValueError:
+ self.warning("Exception when formatting: %s" %
+ traceback.format_exc())
+
+ cpu_percent, cpu_times, io, (swap_in, swap_out) = resources(None)
+ duration = rm.end_time - rm.start_time
+
+ # Write out Perfherder data if configured.
+ if self.resource_monitor_perfherder_id:
+ perfherder_name, perfherder_options = self.resource_monitor_perfherder_id
+
+ suites = []
+ overall = []
+
+ if cpu_percent:
+ overall.append({
+ 'name': 'cpu_percent',
+ 'value': cpu_percent,
+ })
+
+ overall.extend([
+ {'name': 'io_write_bytes', 'value': io.write_bytes},
+ {'name': 'io.read_bytes', 'value': io.read_bytes},
+ {'name': 'io_write_time', 'value': io.write_time},
+ {'name': 'io_read_time', 'value': io.read_time},
+ ])
+
+ suites.append({
+ 'name': '%s.overall' % perfherder_name,
+ 'extraOptions': perfherder_options + self.perfherder_resource_options(),
+ 'subtests': overall,
+
+ })
+
+ for phase in rm.phases.keys():
+ phase_duration = rm.phases[phase][1] - rm.phases[phase][0]
+ subtests = [
+ {
+ 'name': 'time',
+ 'value': phase_duration,
+ }
+ ]
+ cpu_percent = rm.aggregate_cpu_percent(phase=phase,
+ per_cpu=False)
+ if cpu_percent is not None:
+ subtests.append({
+ 'name': 'cpu_percent',
+ 'value': rm.aggregate_cpu_percent(phase=phase,
+ per_cpu=False),
+ })
+
+ # We don't report I/O during each step because measured I/O
+ # is system I/O and that I/O can be delayed (e.g. writes will
+ # buffer before being flushed and recorded in our metrics).
+ suites.append({
+ 'name': '%s.%s' % (perfherder_name, phase),
+ 'subtests': subtests,
+ })
+
+ data = {
+ 'framework': {'name': 'job_resource_usage'},
+ 'suites': suites,
+ }
+
+ schema_path = os.path.join(external_tools_path,
+ 'performance-artifact-schema.json')
+ with open(schema_path, 'rb') as fh:
+ schema = json.load(fh)
+
+ # this will throw an exception that causes the job to fail if the
+ # perfherder data is not valid -- please don't change this
+ # behaviour, otherwise people will inadvertently break this
+ # functionality
+ self.info('Validating Perfherder data against %s' % schema_path)
+ jsonschema.validate(data, schema)
+ self.info('PERFHERDER_DATA: %s' % json.dumps(data))
+
+ log_usage('Total resource usage', duration, cpu_percent, cpu_times, io)
+
+ # Print special messages so usage shows up in Treeherder.
+ if cpu_percent:
+ self._tinderbox_print('CPU usage<br/>{:,.1f}%'.format(
+ cpu_percent))
+
+ self._tinderbox_print('I/O read bytes / time<br/>{:,} / {:,}'.format(
+ io.read_bytes, io.read_time))
+ self._tinderbox_print('I/O write bytes / time<br/>{:,} / {:,}'.format(
+ io.write_bytes, io.write_time))
+
+ # Print CPU components having >1%. "cpu_times" is a data structure
+ # whose attributes are measurements. Ideally we'd have an API that
+ # returned just the measurements as a dict or something.
+ cpu_attrs = []
+ for attr in sorted(dir(cpu_times)):
+ if attr.startswith('_'):
+ continue
+ if attr in ('count', 'index'):
+ continue
+ cpu_attrs.append(attr)
+
+ cpu_total = sum(getattr(cpu_times, attr) for attr in cpu_attrs)
+
+ for attr in cpu_attrs:
+ value = getattr(cpu_times, attr)
+ percent = value / cpu_total * 100.0
+ if percent > 1.00:
+ self._tinderbox_print('CPU {}<br/>{:,.1f} ({:,.1f}%)'.format(
+ attr, value, percent))
+
+ # Swap on Windows isn't reported by psutil.
+ if not self._is_windows():
+ self._tinderbox_print('Swap in / out<br/>{:,} / {:,}'.format(
+ swap_in, swap_out))
+
+ for phase in rm.phases.keys():
+ start_time, end_time = rm.phases[phase]
+ cpu_percent, cpu_times, io, swap = resources(phase)
+ log_usage(phase, end_time - start_time, cpu_percent, cpu_times, io)
+
+ def _tinderbox_print(self, message):
+ self.info('TinderboxPrint: %s' % message)
+
+
+# __main__ {{{1
+
+if __name__ == '__main__':
+ '''TODO: unit tests.
+ '''
+ pass
diff --git a/testing/mozharness/mozharness/base/script.py b/testing/mozharness/mozharness/base/script.py
new file mode 100755
index 000000000..828f4e39e
--- /dev/null
+++ b/testing/mozharness/mozharness/base/script.py
@@ -0,0 +1,2273 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic script objects.
+
+script.py, along with config.py and log.py, represents the core of
+mozharness.
+"""
+
+import codecs
+from contextlib import contextmanager
+import datetime
+import errno
+import fnmatch
+import functools
+import gzip
+import inspect
+import itertools
+import os
+import platform
+import pprint
+import re
+import shutil
+import socket
+import subprocess
+import sys
+import tarfile
+import time
+import traceback
+import urllib2
+import zipfile
+import httplib
+import urlparse
+import hashlib
+if os.name == 'nt':
+ try:
+ import win32file
+ import win32api
+ PYWIN32 = True
+ except ImportError:
+ PYWIN32 = False
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+from io import BytesIO
+
+from mozprocess import ProcessHandler
+from mozharness.base.config import BaseConfig
+from mozharness.base.log import SimpleFileLogger, MultiFileLogger, \
+ LogMixin, OutputParser, DEBUG, INFO, ERROR, FATAL
+
+
+class FetchedIncorrectFilesize(Exception):
+ pass
+
+
+def platform_name():
+ pm = PlatformMixin()
+
+ if pm._is_linux() and pm._is_64_bit():
+ return 'linux64'
+ elif pm._is_linux() and not pm._is_64_bit():
+ return 'linux'
+ elif pm._is_darwin():
+ return 'macosx'
+ elif pm._is_windows() and pm._is_64_bit():
+ return 'win64'
+ elif pm._is_windows() and not pm._is_64_bit():
+ return 'win32'
+ else:
+ return None
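+
+# Example (illustrative): on a 64-bit Linux worker this returns 'linux64',
+# on 32-bit Windows it returns 'win32', and unknown platforms yield None.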
+
+
+class PlatformMixin(object):
+ def _is_windows(self):
+ """ check if the current operating system is Windows.
+
+ Returns:
+ bool: True if the current platform is Windows, False otherwise
+ """
+ system = platform.system()
+ if system in ("Windows", "Microsoft"):
+ return True
+ if system.startswith("CYGWIN"):
+ return True
+ if os.name == 'nt':
+ return True
+
+ def _is_darwin(self):
+ """ check if the current operating system is Darwin.
+
+ Returns:
+ bool: True if the current platform is Darwin, False otherwise
+ """
+ if platform.system() in ("Darwin"):
+ return True
+ if sys.platform.startswith("darwin"):
+ return True
+
+ def _is_linux(self):
+ """ check if the current operating system is a Linux distribution.
+
+ Returns:
+ bool: True if the current platform is a Linux distro, False otherwise
+ """
+ if platform.system() in ("Linux"):
+ return True
+ if sys.platform.startswith("linux"):
+ return True
+
+ def _is_64_bit(self):
+ if self._is_darwin():
+ # osx is a special snowflake and to ensure the arch, it is better to use the following
+ return sys.maxsize > 2**32 # context: https://docs.python.org/2/library/platform.html
+ else:
+ return '64' in platform.architecture()[0] # architecture() returns (bits, linkage)
+
+
+# ScriptMixin {{{1
+class ScriptMixin(PlatformMixin):
+ """This mixin contains simple filesystem commands and the like.
+
+ It also contains some very special but very complex methods that,
+ together with logging and config, provide the base for all scripts
+ in this harness.
+
+ WARNING !!!
+ This class depends entirely on `LogMixin` methods in such a way that it will
+ only works if a class inherits from both `ScriptMixin` and `LogMixin`
+ simultaneously.
+
+ Depends on self.config of some sort.
+
+ Attributes:
+ env (dict): a mapping object representing the string environment.
+ script_obj (ScriptMixin): reference to a ScriptMixin instance.
+ """
+
+ env = None
+ script_obj = None
+
+ def platform_name(self):
+ """ Return the platform name on which the script is running on.
+ Returns:
+ None: for failure to determine the platform.
+ str: The name of the platform (e.g. linux64)
+ """
+ return platform_name()
+
+ # Simple filesystem commands {{{2
+ def mkdir_p(self, path, error_level=ERROR):
+ """ Create a directory if it doesn't exists.
+ This method also logs the creation, error or current existence of the
+ directory to be created.
+
+ Args:
+ path (str): path of the directory to be created.
+ error_level (str): log level name to be used in case of error.
+
+ Returns:
+ None: for success.
+ int: -1 on error
+ """
+
+ if not os.path.exists(path):
+ self.info("mkdir: %s" % path)
+ try:
+ os.makedirs(path)
+ except OSError:
+ self.log("Can't create directory %s!" % path,
+ level=error_level)
+ return -1
+ else:
+ self.debug("mkdir_p: %s Already exists." % path)
+
+ def rmtree(self, path, log_level=INFO, error_level=ERROR,
+ exit_code=-1):
+ """ Delete an entire directory tree and log its result.
+ This method also logs the platform rmtree function, its retries, errors,
+ and current existence of the directory.
+
+ Args:
+ path (str): path to the directory tree root to remove.
+ log_level (str, optional): log level name to use for this operation. Defaults
+ to `INFO`.
+ error_level (str, optional): log level name to use in case of error.
+ Defaults to `ERROR`.
+ exit_code (int, optional): unused parameter, kept for signature compatibility.
+ Defaults to -1
+
+ Returns:
+ None: for success
+ """
+
+ self.log("rmtree: %s" % path, level=log_level)
+ error_message = "Unable to remove %s!" % path
+ if self._is_windows():
+ # Call _rmtree_windows() directly, since even checking
+ # os.path.exists(path) will hang if path is longer than MAX_PATH.
+ self.info("Using _rmtree_windows ...")
+ return self.retry(
+ self._rmtree_windows,
+ error_level=error_level,
+ error_message=error_message,
+ args=(path, ),
+ log_level=log_level,
+ )
+ if os.path.exists(path):
+ if os.path.isdir(path):
+ return self.retry(
+ shutil.rmtree,
+ error_level=error_level,
+ error_message=error_message,
+ retry_exceptions=(OSError, ),
+ args=(path, ),
+ log_level=log_level,
+ )
+ else:
+ return self.retry(
+ os.remove,
+ error_level=error_level,
+ error_message=error_message,
+ retry_exceptions=(OSError, ),
+ args=(path, ),
+ log_level=log_level,
+ )
+ else:
+ self.debug("%s doesn't exist." % path)
+
+ def query_msys_path(self, path):
+ """ replaces the Windows harddrive letter path style with a linux
+ path style, e.g. C:// --> /C/
+ Note: method, not used in any script.
+
+ Args:
+ path (str?): path to convert to the linux path style.
+ Returns:
+ str: in case `path` is a string. The result is the path with the new notation.
+ type(path): `path` itself is returned in case `path` is not str type.
+ """
+ if not isinstance(path, basestring):
+ return path
+ path = path.replace("\\", "/")
+
+ def repl(m):
+ return '/%s/' % m.group(1)
+ path = re.sub(r'''^([a-zA-Z]):/''', repl, path)
+ return path
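+
+ # Example (illustrative):
+ #
+ #     self.query_msys_path(r'C:\mozilla-build\msys')
+ #     # -> '/C/mozilla-build/msys'
+ #
+ # Non-string inputs (e.g. None) are returned unchanged.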
+
+ def _rmtree_windows(self, path):
+ """ Windows-specific rmtree that handles path lengths longer than MAX_PATH.
+ Ported from clobberer.py.
+
+ Args:
+ path (str): directory path to remove.
+
+ Returns:
+ None: if the path doesn't exist.
+ int: the return number of calling `self.run_command`
+ int: in case the path specified is not a directory but a file.
+ 0 on success, non-zero on error. Note: The returned value
+ is the result of calling `win32file.DeleteFile`
+ """
+
+ assert self._is_windows()
+ path = os.path.realpath(path)
+ full_path = '\\\\?\\' + path
+ if not os.path.exists(full_path):
+ return
+ if not PYWIN32:
+ if not os.path.isdir(path):
+ return self.run_command('del /F /Q "%s"' % path)
+ else:
+ return self.run_command('rmdir /S /Q "%s"' % path)
+ # Make sure directory is writable
+ win32file.SetFileAttributesW('\\\\?\\' + path, win32file.FILE_ATTRIBUTE_NORMAL)
+ # rmtree() is sometimes called on a single file; handle that case.
+ if not os.path.isdir('\\\\?\\' + path):
+ return win32file.DeleteFile('\\\\?\\' + path)
+
+ for ffrec in win32api.FindFiles('\\\\?\\' + path + '\\*.*'):
+ file_attr = ffrec[0]
+ name = ffrec[8]
+ if name == '.' or name == '..':
+ continue
+ full_name = os.path.join(path, name)
+
+ if file_attr & win32file.FILE_ATTRIBUTE_DIRECTORY:
+ self._rmtree_windows(full_name)
+ else:
+ try:
+ win32file.SetFileAttributesW('\\\\?\\' + full_name, win32file.FILE_ATTRIBUTE_NORMAL)
+ win32file.DeleteFile('\\\\?\\' + full_name)
+ except:
+ # DeleteFile fails on long paths, del /f /q works just fine
+ self.run_command('del /F /Q "%s"' % full_name)
+
+ win32file.RemoveDirectory('\\\\?\\' + path)
+
+ def get_filename_from_url(self, url):
+ """ parse a filename base on an url.
+
+ Args:
+ url (str): url to parse for the filename
+
+ Returns:
+ str: filename parsed from the url, or `netloc` network location part
+ of the url.
+ """
+
+ parsed = urlparse.urlsplit(url.rstrip('/'))
+ if parsed.path != '':
+ return parsed.path.rsplit('/', 1)[-1]
+ else:
+ return parsed.netloc
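+
+ # Examples (illustrative; the URLs are hypothetical):
+ #
+ #     self.get_filename_from_url('https://example.com/pub/firefox.tar.bz2')
+ #     # -> 'firefox.tar.bz2'
+ #     self.get_filename_from_url('https://example.com/')
+ #     # -> 'example.com' (falls back to the network location)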
+
+ def _urlopen(self, url, **kwargs):
+ """ open the url `url` using `urllib2`.
+ This method can be overridden to extend its behavior.
+
+ Args:
+ url (str | urllib2.Request): url to open
+ kwargs: Arbitrary keyword arguments passed to the `urllib2.urlopen` function.
+
+ Returns:
+ file-like: file-like object with additional methods as defined in
+ `urllib2.urlopen`_.
+ None: None may be returned if no handler handles the request.
+
+ Raises:
+ urllib2.URLError: on errors
+
+ .. _urllib2.urlopen:
+ https://docs.python.org/2/library/urllib2.html#urllib2.urlopen
+ """
+ # http://bugs.python.org/issue13359 - urllib2 does not automatically quote the URL
+ url_quoted = urllib2.quote(url, safe='%/:=&?~#+!$,;\'@()*[]|')
+ return urllib2.urlopen(url_quoted, **kwargs)
+
+ def fetch_url_into_memory(self, url):
+ ''' Downloads a file from a url into memory instead of disk.
+
+ Args:
+ url (str): URL path where the file to be downloaded is located.
+
+ Raises:
+ IOError: When the url points to a file on disk and cannot be found
+ FetchedIncorrectFilesize: When the size of the fetched file does not match the
+ expected file size.
+ ValueError: When the scheme of a url is not what is expected.
+
+ Returns:
+ BytesIO: contents of url
+ '''
+ self.info('Fetch {} into memory'.format(url))
+ parsed_url = urlparse.urlparse(url)
+
+ if parsed_url.scheme in ('', 'file'):
+ if not os.path.isfile(url):
+ raise IOError('Could not find file to extract: {}'.format(url))
+
+ expected_file_size = os.stat(url.replace('file://', '')).st_size
+
+ # In case we're referencing a file without file://
+ if parsed_url.scheme == '':
+ url = 'file://%s' % os.path.abspath(url)
+ parsed_url = urlparse.urlparse(url)
+
+ request = urllib2.Request(url)
+ # When calling fetch_url_into_memory() you should retry when we raise one of these exceptions:
+ # * Bug 1300663 - HTTPError: HTTP Error 404: Not Found
+ # * Bug 1300413 - HTTPError: HTTP Error 500: Internal Server Error
+ # * Bug 1300943 - HTTPError: HTTP Error 503: Service Unavailable
+ # * Bug 1300953 - URLError: <urlopen error [Errno -2] Name or service not known>
+ # * Bug 1301594 - URLError: <urlopen error [Errno 10054] An existing connection was ...
+ # * Bug 1301597 - URLError: <urlopen error [Errno 8] _ssl.c:504: EOF occurred in ...
+ # * Bug 1301855 - URLError: <urlopen error [Errno 60] Operation timed out>
+ # * Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
+ # * Bug 1301807 - BadStatusLine: ''
+ #
+ # Bug 1309912 - Adding timeout in hopes to solve blocking on response.read() (bug 1300413)
+ response = urllib2.urlopen(request, timeout=30)
+
+ if parsed_url.scheme in ('http', 'https'):
+ expected_file_size = int(response.headers.get('Content-Length'))
+
+ self.info('Http code: {}'.format(response.getcode()))
+ for k in sorted(response.headers.keys()):
+ if k.lower().startswith('x-amz-') or k in ('Content-Encoding', 'Content-Type', 'via'):
+ self.info('{}: {}'.format(k, response.headers.get(k)))
+
+ file_contents = response.read()
+ obtained_file_size = len(file_contents)
+ self.info('Expected file size: {}'.format(expected_file_size))
+ self.info('Obtained file size: {}'.format(obtained_file_size))
+
+ if obtained_file_size != expected_file_size:
+ raise FetchedIncorrectFilesize(
+ 'The expected file size is {} while we got instead {}'.format(
+ expected_file_size, obtained_file_size)
+ )
+
+ # Use BytesIO instead of StringIO
+ # http://stackoverflow.com/questions/34162017/unzip-buffer-with-python/34162395#34162395
+ return BytesIO(file_contents)
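+
+ # Usage sketch (illustrative; the URL is hypothetical). The returned
+ # BytesIO can be handed straight to zipfile without touching disk:
+ #
+ #     import zipfile
+ #     compressed = self.fetch_url_into_memory('https://example.com/tests.zip')
+ #     with zipfile.ZipFile(compressed) as z:
+ #         z.extractall('tests')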
+
+ def _download_file(self, url, file_name):
+ """ Helper script for download_file()
+ Additionaly this function logs all exceptions as warnings before
+ re-raising them
+
+ Args:
+ url (str): string containing the URL with the file location
+ file_name (str): name of the file where the downloaded file
+ is written.
+
+ Returns:
+ str: filename of the written file on disk
+
+ Raises:
+ urllib2.URLError: on incomplete download.
+ urllib2.HTTPError: on HTTP error code
+ socket.timeout: on connection timeout
+ socket.error: on socket error
+ """
+ # If our URLs look like files, prefix them with file:// so they can
+ # be loaded like URLs.
+ if not (url.startswith("http") or url.startswith("file://")):
+ if not os.path.isfile(url):
+ self.fatal("The file %s does not exist" % url)
+ url = 'file://%s' % os.path.abspath(url)
+
+ try:
+ f_length = None
+ f = self._urlopen(url, timeout=30)
+
+ if f.info().get('content-length') is not None:
+ f_length = int(f.info()['content-length'])
+ got_length = 0
+ local_file = open(file_name, 'wb')
+ while True:
+ block = f.read(1024 ** 2)
+ if not block:
+ if f_length is not None and got_length != f_length:
+ raise urllib2.URLError("Download incomplete; content-length was %d, but only received %d" % (f_length, got_length))
+ break
+ local_file.write(block)
+ if f_length is not None:
+ got_length += len(block)
+ local_file.close()
+ return file_name
+ except urllib2.HTTPError, e:
+ self.warning("Server returned status %s %s for %s" % (str(e.code), str(e), url))
+ raise
+ except urllib2.URLError, e:
+ self.warning("URL Error: %s" % url)
+
+ # Failures due to missing local files won't benefit from retry.
+ # Raise the original OSError.
+ if isinstance(e.args[0], OSError) and e.args[0].errno == errno.ENOENT:
+ raise e.args[0]
+
+ remote_host = urlparse.urlsplit(url)[1]
+ if remote_host:
+ nslookup = self.query_exe('nslookup')
+ error_list = [{
+ 'substr': "server can't find %s" % remote_host,
+ 'level': ERROR,
+ 'explanation': "Either %s is an invalid hostname, or DNS is busted." % remote_host,
+ }]
+ self.run_command([nslookup, remote_host],
+ error_list=error_list)
+ raise
+ except socket.timeout, e:
+ self.warning("Timed out accessing %s: %s" % (url, str(e)))
+ raise
+ except socket.error, e:
+ self.warning("Socket error when accessing %s: %s" % (url, str(e)))
+ raise
+
+ def _retry_download(self, url, error_level, file_name=None, retry_config=None):
+ """ Helper method to retry download methods.
+
+ This method calls `self.retry` on `self._download_file` using the passed
+ parameters if a file_name is specified. If no file is specified, we will
+ instead call `self._urlopen`, which grabs the contents of a url but does
+ not create a file on disk.
+
+ Args:
+ url (str): URL path where the file is located.
+ file_name (str, optional): file name where the file will be written to. Defaults to None.
+ error_level (str): log level to use in case an error occurs.
+ retry_config (dict, optional): key-value pairs to be passed to
+ `self.retry`. Defaults to `None`
+
+ Returns:
+ str: `self._download_file` return value is returned
+ unknown: `self.retry` `failure_status` is returned on failure; this
+ method passes `failure_status=None` unless overridden via `retry_config`
+ """
+ retry_args = dict(
+ failure_status=None,
+ retry_exceptions=(urllib2.HTTPError, urllib2.URLError,
+ httplib.BadStatusLine,
+ socket.timeout, socket.error),
+ error_message="Can't download from %s to %s!" % (url, file_name),
+ error_level=error_level,
+ )
+
+ if retry_config:
+ retry_args.update(retry_config)
+
+ download_func = self._urlopen
+ kwargs = {"url": url}
+ if file_name:
+ download_func = self._download_file
+ kwargs = {"url": url, "file_name": file_name}
+
+ return self.retry(
+ download_func,
+ kwargs=kwargs,
+ **retry_args
+ )
+
+
+ def _filter_entries(self, namelist, extract_dirs):
+ """Filter entries of the archive based on the specified list of to extract dirs."""
+ filter_partial = functools.partial(fnmatch.filter, namelist)
+ entries = itertools.chain(*map(filter_partial, extract_dirs or ['*']))
+
+ for entry in entries:
+ yield entry
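+
+ # Behavior sketch (entry names are illustrative):
+ #   list(self._filter_entries(['bin/a', 'docs/b'], ['bin/*']))  # -> ['bin/a']
+ #   list(self._filter_entries(['bin/a', 'docs/b'], None))       # -> all entries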
+
+
+ def unzip(self, compressed_file, extract_to, extract_dirs='*', verbose=False):
+ """This method allows to extract a zip file without writing to disk first.
+
+ Args:
+ compressed_file (object): File-like object with the contents of a compressed zip file.
+ extract_to (str): where to extract the compressed file.
+ extract_dirs (list, optional): directories inside the archive file to extract.
+ Defaults to '*'.
+ verbose (bool, optional): whether or not extracted content should be displayed.
+ Defaults to False.
+
+ Raises:
+ zipfile.BadZipfile: if the contents of the zip file are invalid
+ """
+ with zipfile.ZipFile(compressed_file) as bundle:
+ entries = self._filter_entries(bundle.namelist(), extract_dirs)
+
+ for entry in entries:
+ if verbose:
+ self.info(' {}'.format(entry))
+
+ # Exception to be retried:
+ # Bug 1301645 - BadZipfile: Bad CRC-32 for file ...
+ # http://stackoverflow.com/questions/5624669/strange-badzipfile-bad-crc-32-problem/5626098#5626098
+ # Bug 1301802 - error: Error -3 while decompressing: invalid stored block lengths
+ bundle.extract(entry, path=extract_to)
+
+ # ZipFile doesn't preserve permissions during extraction:
+ # http://bugs.python.org/issue15795
+ fname = os.path.realpath(os.path.join(extract_to, entry))
+ try:
+ # getinfo() can raise KeyError
+ mode = bundle.getinfo(entry).external_attr >> 16 & 0x1FF
+ # Only set permissions if attributes are available. Otherwise all
+ # permissions will be removed eg. on Windows.
+ if mode:
+ os.chmod(fname, mode)
+
+ except KeyError:
+ self.warning('{} was not found in the zip file'.format(entry))
+
+
+ def deflate(self, compressed_file, mode, extract_to='.', *args, **kwargs):
+ """This method allows to extract a compressed file from a tar, tar.bz2 and tar.gz files.
+
+ Args:
+ compressed_file (object): File-like object with the contents of a compressed file.
+ mode (str): string of the form 'filemode[:compression]' (e.g. 'r:gz' or 'r:bz2')
+ extract_to (str, optional): where to extract the compressed file.
+ """
+ t = tarfile.open(fileobj=compressed_file, mode=mode)
+ t.extractall(path=extract_to)
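+
+ # Mode strings select the archive flavor; `buf` below stands for a
+ # hypothetical file-like object such as the BytesIO from fetch_url_into_memory():
+ #   self.deflate(buf, mode='r:gz')   # .tar.gz
+ #   self.deflate(buf, mode='r:bz2')  # .tar.bz2
+ #   self.deflate(buf, mode='r')      # plain .tar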
+
+
+ def download_unpack(self, url, extract_to='.', extract_dirs='*', verbose=False):
+ """Generic method to download and extract a compressed file without writing it to disk first.
+
+ Args:
+ url (str): URL where the file to be downloaded is located.
+ extract_to (str, optional): directory where the downloaded file will
+ be extracted to.
+ extract_dirs (list, optional): directories inside the archive to extract.
+ Defaults to `*`. It currently only applies to zip files.
+ verbose (bool, optional): whether or not extracted content should be displayed.
+ Defaults to False.
+
+ """
+ def _determine_extraction_method_and_kwargs(url):
+ EXTENSION_TO_MIMETYPE = {
+ 'bz2': 'application/x-bzip2',
+ 'gz': 'application/x-gzip',
+ 'tar': 'application/x-tar',
+ 'zip': 'application/zip',
+ }
+ MIMETYPES = {
+ 'application/x-bzip2': {
+ 'function': self.deflate,
+ 'kwargs': {'mode': 'r:bz2'},
+ },
+ 'application/x-gzip': {
+ 'function': self.deflate,
+ 'kwargs': {'mode': 'r:gz'},
+ },
+ 'application/x-tar': {
+ 'function': self.deflate,
+ 'kwargs': {'mode': 'r'},
+ },
+ 'application/zip': {
+ 'function': self.unzip,
+ },
+ 'application/x-zip-compressed': {
+ 'function': self.unzip,
+ },
+ }
+
+ filename = url.split('/')[-1]
+ # XXX: bz2/gz instead of tar.{bz2/gz}
+ extension = filename[filename.rfind('.')+1:]
+ mimetype = EXTENSION_TO_MIMETYPE[extension]
+ self.debug('Mimetype: {}'.format(mimetype))
+
+ function = MIMETYPES[mimetype]['function']
+ kwargs = {
+ 'compressed_file': compressed_file,
+ 'extract_to': extract_to,
+ 'extract_dirs': extract_dirs,
+ 'verbose': verbose,
+ }
+ kwargs.update(MIMETYPES[mimetype].get('kwargs', {}))
+
+ return function, kwargs
+
+ # Many scripts override this method and set extract_dirs to None
+ extract_dirs = '*' if extract_dirs is None else extract_dirs
+ self.info('Downloading and extracting to {} these dirs {} from {}'.format(
+ extract_to,
+ ', '.join(extract_dirs),
+ url,
+ ))
+
+ # 1) Let's fetch the file
+ retry_args = dict(
+ retry_exceptions=(
+ urllib2.HTTPError,
+ urllib2.URLError,
+ httplib.BadStatusLine,
+ socket.timeout,
+ socket.error,
+ FetchedIncorrectFilesize,
+ ),
+ error_message="Can't download from {}".format(url),
+ error_level=FATAL,
+ )
+ compressed_file = self.retry(
+ self.fetch_url_into_memory,
+ kwargs={'url': url},
+ **retry_args
+ )
+
+ # 2) We're guaranteed to have downloaded the file, since error_level=FATAL
+ # Let's unpack the file
+ function, kwargs = _determine_extraction_method_and_kwargs(url)
+ try:
+ function(**kwargs)
+ except zipfile.BadZipfile:
+ # Bug 1306189 - Sometimes a good download turns out to be a
+ # corrupted zipfile. Let's create a signature that is easy to match
+ self.fatal('Check bug 1306189 for details on downloading a truncated zip file.')
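+
+ # End-to-end usage sketch (URL and path are hypothetical); the extension
+ # of the URL's filename selects the extraction method above:
+ #   self.download_unpack('https://example.com/target.tar.bz2', extract_to='build')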
+
+
+ def load_json_url(self, url, error_level=None, *args, **kwargs):
+ """ Returns a json object from a url (it retries). """
+ contents = self._retry_download(
+ url=url, error_level=error_level, *args, **kwargs
+ )
+ return json.loads(contents.read())
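+
+ # Usage sketch (URL is hypothetical):
+ #   revision_data = self.load_json_url('https://example.com/revision.json')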
+
+ # http://www.techniqal.com/blog/2008/07/31/python-file-read-write-with-urllib2/
+ # TODO thinking about creating a transfer object.
+ def download_file(self, url, file_name=None, parent_dir=None,
+ create_parent_dir=True, error_level=ERROR,
+ exit_code=3, retry_config=None):
+ """ Python wget.
+ Download the file at `url` into `file_name` and put it in `parent_dir`.
+ On error, log with the specified `error_level`; on fatal error, exit with `exit_code`.
+ Retries are controlled by the `retry_config` parameter.
+
+ Args:
+ url (str): URL path where the file to be downloaded is located.
+ file_name (str, optional): file_name where the file will be written to.
+ Defaults to the URL's filename.
+ parent_dir (str, optional): directory where the downloaded file will
+ be written to. Defaults to current working
+ directory
+ create_parent_dir (bool, optional): create the parent directory if it
+ doesn't exist. Defaults to `True`
+ error_level (str, optional): log level to use in case an error occurs.
+ Defaults to `ERROR`
+ retry_config (dict, optional): key-value pairs to be passed to
+ `self.retry`. Defaults to `None`
+
+ Returns:
+ str: filename where the downloaded file was written to.
+ unknown: on failure, `failure_status` is returned.
+ """
+ if not file_name:
+ try:
+ file_name = self.get_filename_from_url(url)
+ except AttributeError:
+ self.log("Unable to get filename from %s; bad url?" % url,
+ level=error_level, exit_code=exit_code)
+ return
+ if parent_dir:
+ file_name = os.path.join(parent_dir, file_name)
+ if create_parent_dir:
+ self.mkdir_p(parent_dir, error_level=error_level)
+ self.info("Downloading %s to %s" % (url, file_name))
+ status = self._retry_download(
+ url=url,
+ error_level=error_level,
+ file_name=file_name,
+ retry_config=retry_config
+ )
+ if status == file_name:
+ self.info("Downloaded %d bytes." % os.path.getsize(file_name))
+ return status
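+
+ # Usage sketch (URL is hypothetical; retry_config keys are forwarded to
+ # self.retry()):
+ #   path = self.download_file('https://example.com/firefox.tar.bz2',
+ #                             parent_dir='downloads',
+ #                             retry_config={'attempts': 3, 'sleeptime': 10})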
+
+ def move(self, src, dest, log_level=INFO, error_level=ERROR,
+ exit_code=-1):
+ """ recursively move a file or directory (src) to another location (dest).
+
+ Args:
+ src (str): file or directory path to move.
+ dest (str): file or directory path where to move the content to.
+ log_level (str): log level to use for normal operation. Defaults to
+ `INFO`
+ error_level (str): log level to use on error. Defaults to `ERROR`
+
+ Returns:
+ int: 0 on success. -1 on error.
+ """
+ self.log("Moving %s to %s" % (src, dest), level=log_level)
+ try:
+ shutil.move(src, dest)
+ # http://docs.python.org/tutorial/errors.html
+ except IOError, e:
+ self.log("IO error: %s" % str(e),
+ level=error_level, exit_code=exit_code)
+ return -1
+ except shutil.Error, e:
+ self.log("shutil error: %s" % str(e),
+ level=error_level, exit_code=exit_code)
+ return -1
+ return 0
+
+ def chmod(self, path, mode):
+ """ change `path` mode to `mode`.
+
+ Args:
+ path (str): path whose mode will be modified.
+ mode (int): one of the mode values defined at `stat`_
+
+ .. _stat:
+ https://docs.python.org/2/library/os.html#os.chmod
+ """
+
+ self.info("Chmoding %s to %s" % (path, str(oct(mode))))
+ os.chmod(path, mode)
+
+ def copyfile(self, src, dest, log_level=INFO, error_level=ERROR, copystat=False, compress=False):
+ """ copy or compress `src` into `dest`.
+
+ Args:
+ src (str): filepath to copy.
+ dest (str): filepath where to move the content to.
+ log_level (str, optional): log level to use for normal operation. Defaults to
+ `INFO`
+ error_level (str, optional): log level to use on error. Defaults to `ERROR`
+ copystat (bool, optional): whether or not to copy the file's metadata.
+ Defaults to `False`.
+ compress (bool, optional): whether or not to compress the destination file.
+ Defaults to `False`.
+
+ Returns:
+ int: -1 on error
+ None: on success
+ """
+
+ if compress:
+ self.log("Compressing %s to %s" % (src, dest), level=log_level)
+ try:
+ infile = open(src, "rb")
+ outfile = gzip.open(dest, "wb")
+ outfile.writelines(infile)
+ outfile.close()
+ infile.close()
+ except IOError, e:
+ self.log("Can't compress %s to %s: %s!" % (src, dest, str(e)),
+ level=error_level)
+ return -1
+ else:
+ self.log("Copying %s to %s" % (src, dest), level=log_level)
+ try:
+ shutil.copyfile(src, dest)
+ except (IOError, shutil.Error), e:
+ self.log("Can't copy %s to %s: %s!" % (src, dest, str(e)),
+ level=error_level)
+ return -1
+
+ if copystat:
+ try:
+ shutil.copystat(src, dest)
+ except (IOError, shutil.Error), e:
+ self.log("Can't copy attributes of %s to %s: %s!" % (src, dest, str(e)),
+ level=error_level)
+ return -1
+
+ def copytree(self, src, dest, overwrite='no_overwrite', log_level=INFO,
+ error_level=ERROR):
+ """ An implementation of `shutil.copytree` that allows for `dest` to exist
+ and implements different overwrite levels:
+ - 'no_overwrite' will keep any existing files in the destination tree
+ - 'overwrite_if_exists' will only overwrite destination paths that have
+ the same path names relative to the root of the
+ src and destination tree
+ - 'clobber' will replace the whole destination tree if it exists
+
+ Args:
+ src (str): directory path to move.
+ dest (str): directory path where to move the content to.
+ overwrite (str): string specifying the overwrite level.
+ log_level (str, optional): log level to use for normal operation. Defaults to
+ `INFO`
+ error_level (str, optional): log level to use on error. Defaults to `ERROR`
+
+ Returns:
+ int: -1 on error
+ None: on success
+ """
+
+ self.info('copying tree: %s to %s' % (src, dest))
+ try:
+ if overwrite == 'clobber' or not os.path.exists(dest):
+ self.rmtree(dest)
+ shutil.copytree(src, dest)
+ elif overwrite == 'no_overwrite' or overwrite == 'overwrite_if_exists':
+ files = os.listdir(src)
+ for f in files:
+ abs_src_f = os.path.join(src, f)
+ abs_dest_f = os.path.join(dest, f)
+ if not os.path.exists(abs_dest_f):
+ if os.path.isdir(abs_src_f):
+ self.mkdir_p(abs_dest_f)
+ self.copytree(abs_src_f, abs_dest_f,
+ overwrite='clobber')
+ else:
+ shutil.copy2(abs_src_f, abs_dest_f)
+ elif overwrite == 'no_overwrite': # destination path exists
+ if os.path.isdir(abs_src_f) and os.path.isdir(abs_dest_f):
+ self.copytree(abs_src_f, abs_dest_f,
+ overwrite='no_overwrite')
+ else:
+ self.debug('ignoring path: %s as destination: '
+ '%s exists' % (abs_src_f, abs_dest_f))
+ else: # overwrite == 'overwrite_if_exists' and destination exists
+ self.debug('overwriting: %s with: %s' %
+ (abs_dest_f, abs_src_f))
+ self.rmtree(abs_dest_f)
+
+ if os.path.isdir(abs_src_f):
+ self.mkdir_p(abs_dest_f)
+ self.copytree(abs_src_f, abs_dest_f,
+ overwrite='overwrite_if_exists')
+ else:
+ shutil.copy2(abs_src_f, abs_dest_f)
+ else:
+ self.fatal("%s is not a valid argument for param overwrite" % (overwrite))
+ except (IOError, shutil.Error):
+ self.exception("There was an error while copying %s to %s!" % (src, dest),
+ level=error_level)
+ return -1
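+
+ # Overwrite-level sketch (paths are illustrative):
+ #   self.copytree('src', 'dest')                                   # keep existing files
+ #   self.copytree('src', 'dest', overwrite='overwrite_if_exists')  # replace clashing paths
+ #   self.copytree('src', 'dest', overwrite='clobber')              # replace the whole tree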
+
+ def write_to_file(self, file_path, contents, verbose=True,
+ open_mode='w', create_parent_dir=False,
+ error_level=ERROR):
+ """ Write `contents` to `file_path`, according to `open_mode`.
+
+ Args:
+ file_path (str): filepath where the content will be written to.
+ contents (str): content to write to the filepath.
+ verbose (bool, optional): whether or not to log `contents` value.
+ Defaults to `True`
+ open_mode (str, optional): open mode to use for opening the file.
+ Defaults to `w`
+ create_parent_dir (bool, optional): whether or not to create the
+ parent directory of `file_path`
+ error_level (str, optional): log level to use on error. Defaults to `ERROR`
+
+ Returns:
+ str: `file_path` on success
+ None: on error.
+ """
+ self.info("Writing to file %s" % file_path)
+ if verbose:
+ self.info("Contents:")
+ for line in contents.splitlines():
+ self.info(" %s" % line)
+ if create_parent_dir:
+ parent_dir = os.path.dirname(file_path)
+ self.mkdir_p(parent_dir, error_level=error_level)
+ try:
+ fh = open(file_path, open_mode)
+ try:
+ fh.write(contents)
+ except UnicodeEncodeError:
+ fh.write(contents.encode('utf-8', 'replace'))
+ fh.close()
+ return file_path
+ except IOError:
+ self.log("%s can't be opened for writing!" % file_path,
+ level=error_level)
+
+ @contextmanager
+ def opened(self, file_path, verbose=True, open_mode='r',
+ error_level=ERROR):
+ """ Create a context manager to use on a with statement.
+
+ Args:
+ file_path (str): filepath of the file to open.
+ verbose (bool, optional): unused; kept for signature compatibility.
+ Defaults to True.
+ open_mode (str, optional): open mode to use for opening the file.
+ Defaults to `r`
+ error_level (str, optional): log level name to use on error.
+ Defaults to `ERROR`
+
+ Yields:
+ tuple: (file object, error) pair. In case of error `None` is yielded
+ as file object, together with the corresponding error.
+ If there is no error, `None` is returned as the error.
+ """
+ # See opened_w_error in http://www.python.org/dev/peps/pep-0343/
+ self.info("Reading from file %s" % file_path)
+ try:
+ fh = open(file_path, open_mode)
+ except IOError, err:
+ self.log("unable to open %s: %s" % (file_path, err.strerror),
+ level=error_level)
+ yield None, err
+ else:
+ try:
+ yield fh, None
+ finally:
+ fh.close()
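+
+ # Usage sketch of the (file handle, error) contract (path is illustrative):
+ #   with self.opened('some.log') as (fh, err):
+ #       if err:
+ #           self.warning('could not open the file')
+ #       else:
+ #           contents = fh.read()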
+
+ def read_from_file(self, file_path, verbose=True, open_mode='r',
+ error_level=ERROR):
+ """ Use `self.opened` context manager to open a file and read its
+ content.
+
+ Args:
+ file_path (str): filepath of the file to read.
+ verbose (bool, optional): whether or not to log the file content.
+ Defaults to True.
+ open_mode (str, optional): open mode to use for opening the file.
+ Defaults to `r`
+ error_level (str, optional): log level name to use on error.
+ Defaults to `ERROR`
+
+ Returns:
+ None: on error.
+ str: file content on success.
+ """
+ with self.opened(file_path, verbose, open_mode, error_level) as (fh, err):
+ if err:
+ return None
+ contents = fh.read()
+ if verbose:
+ self.info("Contents:")
+ for line in contents.splitlines():
+ self.info(" %s" % line)
+ return contents
+
+ def chdir(self, dir_name):
+ self.log("Changing directory to %s." % dir_name)
+ os.chdir(dir_name)
+
+ def is_exe(self, fpath):
+ """
+ Determine if fpath is a file and if it is executable.
+ """
+ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+ def which(self, program):
+ """ OS independent implementation of Unix's which command
+
+ Args:
+ program (str): name or path to the program whose executable is
+ being searched.
+
+ Returns:
+ None: if the executable was not found.
+ str: filepath of the executable file.
+ """
+ if self._is_windows() and not program.endswith(".exe"):
+ program += ".exe"
+ fpath, fname = os.path.split(program)
+ if fpath:
+ if self.is_exe(program):
+ return program
+ else:
+ # If the exe file is defined in the configs let's use that
+ exe = self.query_exe(program)
+ if self.is_exe(exe):
+ return exe
+
+ # If not defined, let's look for it in the $PATH
+ env = self.query_env()
+ for path in env["PATH"].split(os.pathsep):
+ exe_file = os.path.join(path, program)
+ if self.is_exe(exe_file):
+ return exe_file
+ return None
+
+ # More complex commands {{{2
+ def retry(self, action, attempts=None, sleeptime=60, max_sleeptime=5 * 60,
+ retry_exceptions=(Exception, ), good_statuses=None, cleanup=None,
+ error_level=ERROR, error_message="%(action)s failed after %(attempts)d tries!",
+ failure_status=-1, log_level=INFO, args=(), kwargs={}):
+ """ generic retry command. Ported from `util.retry`_
+
+ Args:
+ action (func): callable object to retry.
+ attempts (int, optional): maximum number of times to call `action`.
+ Defaults to `self.config.get('global_retries', 5)`
+ sleeptime (int, optional): number of seconds to wait between
+ attempts. Defaults to 60 and doubles each retry attempt, to
+ a maximum of `max_sleeptime'
+ max_sleeptime (int, optional): maximum value of sleeptime. Defaults
+ to 5 minutes
+ retry_exceptions (tuple, optional): Exceptions that should be caught.
+ If exceptions other than those listed in `retry_exceptions' are
+ raised from `action', they will be raised immediately. Defaults
+ to (Exception)
+ good_statuses (tuple, optional): return values that count as success;
+ if specified and the return value is not listed, the call is
+ retried. Defaults to `None`.
+ cleanup (func, optional): If `cleanup' is provided and callable
+ it will be called immediately after an Exception is caught.
+ No arguments will be passed to it. If your cleanup function
+ requires arguments it is recommended that you wrap it in an
+ argumentless function.
+ Defaults to `None`.
+ error_level (str, optional): log level name in case of error.
+ Defaults to `ERROR`.
+ error_message (str, optional): string format to use in case
+ none of the attempts succeed. Defaults to
+ '%(action)s failed after %(attempts)d tries!'
+ failure_status (int, optional): flag to return in case the retries
+ were not successful. Defaults to -1.
+ log_level (str, optional): log level name to use for normal activity.
+ Defaults to `INFO`.
+ args (tuple, optional): positional arguments to pass onto `action`.
+ kwargs (dict, optional): key-value arguments to pass onto `action`.
+
+ Returns:
+ object: return value of `action`.
+ int: failure status in case of failure retries.
+ """
+ if not callable(action):
+ self.fatal("retry() called with an uncallable method %s!" % action)
+ if cleanup and not callable(cleanup):
+ self.fatal("retry() called with an uncallable cleanup method %s!" % cleanup)
+ if not attempts:
+ attempts = self.config.get("global_retries", 5)
+ if max_sleeptime < sleeptime:
+ self.debug("max_sleeptime %d less than sleeptime %d" % (
+ max_sleeptime, sleeptime))
+ n = 0
+ while n <= attempts:
+ retry = False
+ n += 1
+ try:
+ self.log("retry: Calling %s with args: %s, kwargs: %s, attempt #%d" %
+ (action.__name__, str(args), str(kwargs), n), level=log_level)
+ status = action(*args, **kwargs)
+ if good_statuses and status not in good_statuses:
+ retry = True
+ except retry_exceptions, e:
+ retry = True
+ error_message = "%s\nCaught exception: %s" % (error_message, str(e))
+ self.log('retry: attempt #%d caught exception: %s' % (n, str(e)), level=INFO)
+
+ if not retry:
+ return status
+ else:
+ if cleanup:
+ cleanup()
+ if n == attempts:
+ self.log(error_message % {'action': action, 'attempts': n}, level=error_level)
+ return failure_status
+ if sleeptime > 0:
+ self.log("retry: Failed, sleeping %d seconds before retrying" %
+ sleeptime, level=log_level)
+ time.sleep(sleeptime)
+ sleeptime = sleeptime * 2
+ if sleeptime > max_sleeptime:
+ sleeptime = max_sleeptime
+
+ def query_env(self, partial_env=None, replace_dict=None,
+ purge_env=(),
+ set_self_env=None, log_level=DEBUG,
+ avoid_host_env=False):
+ """ Environment query/generation method.
+ The default, self.query_env(), will look for self.config['env']
+ and replace any special strings in there ( %(PATH)s ).
+ It will then store it as self.env for speeding things up later.
+
+ If you specify partial_env, partial_env will be used instead of
+ self.config['env'], and we don't save self.env as it's a one-off.
+
+
+ Args:
+ partial_env (dict, optional): key-value pairs of the name and value
+ of different environment variables. Defaults to an empty dictionary.
+ replace_dict (dict, optional): key-value pairs to replace the old
+ environment variables.
+ purge_env (list): environment names to delete from the final
+ environment dictionary.
+ set_self_env (boolean, optional): whether or not the environment
+ variables dictionary should be copied to `self`.
+ Defaults to True.
+ log_level (str, optional): log level name to use on normal operation.
+ Defaults to `DEBUG`.
+ avoid_host_env (boolean, optional): if set to True, we will not use
+ any environment variables set on the host except PATH.
+ Defaults to False.
+
+ Returns:
+ dict: environment variables names with their values.
+ """
+ if partial_env is None:
+ if self.env is not None:
+ return self.env
+ partial_env = self.config.get('env', None)
+ if partial_env is None:
+ partial_env = {}
+ if set_self_env is None:
+ set_self_env = True
+
+ env = {'PATH': os.environ['PATH']} if avoid_host_env else os.environ.copy()
+
+ default_replace_dict = self.query_abs_dirs()
+ default_replace_dict['PATH'] = os.environ['PATH']
+ if not replace_dict:
+ replace_dict = default_replace_dict
+ else:
+ for key in default_replace_dict:
+ if key not in replace_dict:
+ replace_dict[key] = default_replace_dict[key]
+ for key in partial_env.keys():
+ env[key] = partial_env[key] % replace_dict
+ self.log("ENV: %s is now %s" % (key, env[key]), level=log_level)
+ for k in purge_env:
+ if k in env:
+ del env[k]
+ if set_self_env:
+ self.env = env
+ return env
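+
+ # Usage sketch: %(...)s tokens in partial_env values are expanded from
+ # replace_dict, which includes PATH and the abs dirs by default:
+ #   env = self.query_env(partial_env={'PATH': '/opt/tools/bin:%(PATH)s'})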
+
+ def query_exe(self, exe_name, exe_dict='exes', default=None,
+ return_type=None, error_level=FATAL):
+ """One way to work around PATH rewrites.
+
+ By default, return exe_name, and we'll fall through to searching
+ os.environ["PATH"].
+ However, if self.config[exe_dict][exe_name] exists, return that.
+ This lets us override exe paths via config file.
+
+ If we need runtime setting, we can build in self.exes support later.
+
+ Args:
+ exe_name (str): name of the executable to search for.
+ exe_dict (str, optional): name of the dictionary of executables
+ present in `self.config`. Defaults to `exes`.
+ default (str, optional): default name of the executable to search
+ for. Defaults to `exe_name`.
+ return_type (str, optional): type the return value will be
+ converted to. Only 'list', 'string' and `None` are
+ supported. Defaults to `None`.
+ error_level (str, optional): log level name to use on error.
+
+ Returns:
+ list: in case return_type is 'list'
+ str: in case return_type is 'string'
+ None: in case return_type is `None`
+ Any: if the found executable is not of type list, tuple nor str.
+ """
+ if default is None:
+ default = exe_name
+ exe = self.config.get(exe_dict, {}).get(exe_name, default)
+ repl_dict = {}
+ if hasattr(self.script_obj, 'query_abs_dirs'):
+ # allow for 'make': '%(abs_work_dir)s/...' etc.
+ dirs = self.script_obj.query_abs_dirs()
+ repl_dict.update(dirs)
+ if isinstance(exe, dict):
+ found = False
+ # allow for searchable paths of the buildbot exe
+ for name, path in exe.iteritems():
+ if isinstance(path, list) or isinstance(path, tuple):
+ path = [x % repl_dict for x in path]
+ if all([os.path.exists(section) for section in path]):
+ found = True
+ elif isinstance(path, str):
+ path = path % repl_dict
+ if os.path.exists(path):
+ found = True
+ else:
+ self.log("a exes %s dict's value is not a string, list, or tuple. Got key "
+ "%s and value %s" % (exe_name, name, str(path)), level=error_level)
+ if found:
+ exe = path
+ break
+ else:
+ self.log("query_exe was a searchable dict but an existing path could not be "
+ "determined. Tried searching in paths: %s" % (str(exe)), level=error_level)
+ return None
+ elif isinstance(exe, list) or isinstance(exe, tuple):
+ exe = [x % repl_dict for x in exe]
+ elif isinstance(exe, str):
+ exe = exe % repl_dict
+ else:
+ self.log("query_exe: %s is not a list, tuple, dict, or string: "
+ "%s!" % (exe_name, str(exe)), level=error_level)
+ return exe
+ if return_type == "list":
+ if isinstance(exe, str):
+ exe = [exe]
+ elif return_type == "string":
+ if isinstance(exe, list):
+ exe = subprocess.list2cmdline(exe)
+ elif return_type is not None:
+ self.log("Unknown return_type type %s requested in query_exe!" % return_type, level=error_level)
+ return exe
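+
+ # Config sketch (values are illustrative): given
+ #   config = {'exes': {'make': '%(abs_work_dir)s/bin/make'}}
+ # query_exe('make') expands the %(abs_work_dir)s token and returns that path,
+ # while query_exe('hg') falls back to plain 'hg' for a $PATH lookup.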
+
+ def run_command(self, command, cwd=None, error_list=None,
+ halt_on_failure=False, success_codes=None,
+ env=None, partial_env=None, return_type='status',
+ throw_exception=False, output_parser=None,
+ output_timeout=None, fatal_exit_code=2,
+ error_level=ERROR, **kwargs):
+ """Run a command, with logging and error parsing.
+ TODO: context_lines
+
+ error_list example:
+ [{'regex': re.compile('^Error: LOL J/K'), 'level': IGNORE},
+ {'regex': re.compile('^Error:'), 'level': ERROR, 'contextLines': '5:5'},
+ {'substr': 'THE WORLD IS ENDING', 'level': FATAL, 'contextLines': '20:'}
+ ]
+ (context_lines isn't written yet)
+
+ Args:
+ command (str | list | tuple): command or sequence of commands to
+ execute and log.
+ cwd (str, optional): directory path from where to execute the
+ command. Defaults to `None`.
+ error_list (list, optional): list of errors to pass to
+ `mozharness.base.log.OutputParser`. Defaults to `None`.
+ halt_on_failure (bool, optional): whether or not to redefine the
+ log level as `FATAL` on errors. Defaults to False.
+ success_codes (list, optional): return values considered
+ successful. Defaults to [0].
+ env (dict, optional): key-value of environment values to use to
+ run the command. Defaults to None.
+ partial_env (dict, optional): key-value of environment values to
+ replace from the current environment values. Defaults to None.
+ return_type (str, optional): if equal to 'num_errors' then the
+ amount of errors matched by `error_list` is returned. Defaults
+ to 'status'.
+ throw_exception (bool, optional): whether or not to raise an
+ exception if the return value of the command doesn't match
+ any of the `success_codes`. Defaults to False.
+ output_parser (OutputParser, optional): lets you provide an
+ instance of your own OutputParser subclass. Defaults to `OutputParser`.
+ output_timeout (int): number of seconds to wait for output before
+ the process is killed.
+ fatal_exit_code (int, optional): call `self.fatal` if the return value
+ of the command is not in `success_codes`. Defaults to 2.
+ error_level (str, optional): log level name to use on error. Defaults
+ to `ERROR`.
+ **kwargs: Arbitrary keyword arguments.
+
+ Returns:
+ int: -1 on error.
+ Any: `command` return value is returned otherwise.
+ """
+ if success_codes is None:
+ success_codes = [0]
+ if cwd is not None:
+ if not os.path.isdir(cwd):
+ level = error_level
+ if halt_on_failure:
+ level = FATAL
+ self.log("Can't run command %s in non-existent directory '%s'!" %
+ (command, cwd), level=level)
+ return -1
+ self.info("Running command: %s in %s" % (command, cwd))
+ else:
+ self.info("Running command: %s" % command)
+ if isinstance(command, list) or isinstance(command, tuple):
+ self.info("Copy/paste: %s" % subprocess.list2cmdline(command))
+ shell = True
+ if isinstance(command, list) or isinstance(command, tuple):
+ shell = False
+ if env is None:
+ if partial_env:
+ self.info("Using partial env: %s" % pprint.pformat(partial_env))
+ env = self.query_env(partial_env=partial_env)
+ else:
+ self.info("Using env: %s" % pprint.pformat(env))
+
+ if output_parser is None:
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=error_list)
+ else:
+ parser = output_parser
+
+ try:
+ if output_timeout:
+ def processOutput(line):
+ parser.add_lines(line)
+
+ def onTimeout():
+ self.info("Automation Error: mozprocess timed out after %s seconds running %s" % (str(output_timeout), str(command)))
+
+ p = ProcessHandler(command,
+ shell=shell,
+ env=env,
+ cwd=cwd,
+ storeOutput=False,
+ onTimeout=(onTimeout,),
+ processOutputLine=[processOutput])
+ self.info("Calling %s with output_timeout %d" % (command, output_timeout))
+ p.run(outputTimeout=output_timeout)
+ p.wait()
+ if p.timedOut:
+ self.log(
+ 'timed out after %s seconds of no output' % output_timeout,
+ level=error_level
+ )
+ returncode = int(p.proc.returncode)
+ else:
+ p = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
+ cwd=cwd, stderr=subprocess.STDOUT, env=env,
+ bufsize=0)
+ loop = True
+ while loop:
+ if p.poll() is not None:
+ """Avoid losing the final lines of the log?"""
+ loop = False
+ while True:
+ line = p.stdout.readline()
+ if not line:
+ break
+ parser.add_lines(line)
+ returncode = p.returncode
+ except OSError, e:
+ level = error_level
+ if halt_on_failure:
+ level = FATAL
+ self.log('caught OS error %s: %s while running %s' % (e.errno,
+ e.strerror, command), level=level)
+ return -1
+
+ return_level = INFO
+ if returncode not in success_codes:
+ return_level = error_level
+ if throw_exception:
+ raise subprocess.CalledProcessError(returncode, command)
+ self.log("Return code: %d" % returncode, level=return_level)
+
+ if halt_on_failure:
+ _fail = False
+ if returncode not in success_codes:
+ self.log(
+ "%s not in success codes: %s" % (returncode, success_codes),
+ level=error_level
+ )
+ _fail = True
+ if parser.num_errors:
+ self.log("failures found while parsing output", level=error_level)
+ _fail = True
+ if _fail:
+ self.return_code = fatal_exit_code
+ self.fatal("Halting on failure while running %s" % command,
+ exit_code=fatal_exit_code)
+ if return_type == 'num_errors':
+ return parser.num_errors
+ return returncode
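+
+ # Usage sketch (command and error_list entry are illustrative):
+ #   status = self.run_command(['make', '-j4'],
+ #                             cwd='build',
+ #                             error_list=[{'substr': 'Error 1', 'level': ERROR}],
+ #                             halt_on_failure=True)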
+
+ def get_output_from_command(self, command, cwd=None,
+ halt_on_failure=False, env=None,
+ silent=False, log_level=INFO,
+ tmpfile_base_path='tmpfile',
+ return_type='output', save_tmpfiles=False,
+ throw_exception=False, fatal_exit_code=2,
+ ignore_errors=False, success_codes=None):
+ """Similar to run_command, but where run_command is an
+ os.system(command) analog, get_output_from_command is a `command`
+ analog.
+
+ Less error checking by design, though if we figure out how to
+ do it without borking the output, great.
+
+ TODO: binary mode? silent is kinda like that.
+ TODO: since p.wait() can take a long time, optionally log something
+ every N seconds?
+ TODO: optionally only keep the first or last (N) line(s) of output?
+ TODO: optionally only return the tmp_stdout_filename?
+
+ ignore_errors=True is for the case where a command might produce standard
+ error output, but you don't particularly care; setting to True will
+ cause standard error to be logged at DEBUG rather than ERROR
+
+ Args:
+ command (str | list): command or list of commands to
+ execute and log.
+ cwd (str, optional): directory path from where to execute the
+ command. Defaults to `None`.
+ halt_on_failure (bool, optional): whether or not to redefine the
+ log level as `FATAL` on error. Defaults to False.
+ env (dict, optional): key-value of environment values to use to
+ run the command. Defaults to None.
+ silent (bool, optional): whether or not to output the stdout of
+ executing the command. Defaults to False.
+ log_level (str, optional): log level name to use on normal execution.
+ Defaults to `INFO`.
+ tmpfile_base_path (str, optional): base path of the file to which
+ the output will be written to. Defaults to 'tmpfile'.
+ return_type (str, optional): if equal to 'output' then the complete
+ output of the executed command is returned, otherwise the written
+ filenames are returned. Defaults to 'output'.
+ save_tmpfiles (bool, optional): whether or not to save the temporary
+ files created from the command output. Defaults to False.
+ throw_exception (bool, optional): whether or not to raise an
+ exception if the return value of the command is not zero.
+ Defaults to False.
+ fatal_exit_code (int, optional): exit code to use when `self.fatal`
+ is called on failure. Defaults to 2.
+ ignore_errors (bool, optional): whether or not to change the log
+ level to `ERROR` for the output of stderr. Defaults to False.
+ success_codes (list, optional): return values considered
+ successful. Defaults to [0].
+
+ Returns:
+ None: if the cwd is not a directory.
+ None: on IOError.
+ tuple: stdout and stderr filenames.
+ str: stdout output.
+ """
+ if cwd:
+ if not os.path.isdir(cwd):
+ level = ERROR
+ if halt_on_failure:
+ level = FATAL
+ self.log("Can't run command %s in non-existent directory %s!" %
+ (command, cwd), level=level)
+ return None
+ self.info("Getting output from command: %s in %s" % (command, cwd))
+ else:
+ self.info("Getting output from command: %s" % command)
+ if isinstance(command, list):
+ self.info("Copy/paste: %s" % subprocess.list2cmdline(command))
+ # This could potentially return something?
+ tmp_stdout = None
+ tmp_stderr = None
+ tmp_stdout_filename = '%s_stdout' % tmpfile_base_path
+ tmp_stderr_filename = '%s_stderr' % tmpfile_base_path
+ if success_codes is None:
+ success_codes = [0]
+
+ # TODO probably some more elegant solution than 2 similar passes
+ try:
+ tmp_stdout = open(tmp_stdout_filename, 'w')
+ except IOError:
+ level = ERROR
+ if halt_on_failure:
+ level = FATAL
+ self.log("Can't open %s for writing!" % tmp_stdout_filename +
+ self.exception(), level=level)
+ return None
+ try:
+ tmp_stderr = open(tmp_stderr_filename, 'w')
+ except IOError:
+ level = ERROR
+ if halt_on_failure:
+ level = FATAL
+ self.log("Can't open %s for writing!" % tmp_stderr_filename +
+ self.exception(), level=level)
+ return None
+ shell = True
+ if isinstance(command, list):
+ shell = False
+ p = subprocess.Popen(command, shell=shell, stdout=tmp_stdout,
+ cwd=cwd, stderr=tmp_stderr, env=env)
+ # XXX: changed from self.debug to self.log due to this error:
+ # TypeError: debug() takes exactly 1 argument (2 given)
+ self.log("Temporary files: %s and %s" % (tmp_stdout_filename, tmp_stderr_filename), level=DEBUG)
+ p.wait()
+ tmp_stdout.close()
+ tmp_stderr.close()
+ return_level = DEBUG
+ output = None
+ if os.path.exists(tmp_stdout_filename) and os.path.getsize(tmp_stdout_filename):
+ output = self.read_from_file(tmp_stdout_filename,
+ verbose=False)
+ if not silent:
+ self.log("Output received:", level=log_level)
+ output_lines = output.rstrip().splitlines()
+ for line in output_lines:
+ if not line or line.isspace():
+ continue
+ line = line.decode("utf-8")
+ self.log(' %s' % line, level=log_level)
+ output = '\n'.join(output_lines)
+ if os.path.exists(tmp_stderr_filename) and os.path.getsize(tmp_stderr_filename):
+ if not ignore_errors:
+ return_level = ERROR
+ self.log("Errors received:", level=return_level)
+ errors = self.read_from_file(tmp_stderr_filename,
+ verbose=False)
+ for line in errors.rstrip().splitlines():
+ if not line or line.isspace():
+ continue
+ line = line.decode("utf-8")
+ self.log(' %s' % line, level=return_level)
+ elif p.returncode not in success_codes and not ignore_errors:
+ return_level = ERROR
+ # Clean up.
+ if not save_tmpfiles:
+ self.rmtree(tmp_stderr_filename, log_level=DEBUG)
+ self.rmtree(tmp_stdout_filename, log_level=DEBUG)
+ if p.returncode and throw_exception:
+ raise subprocess.CalledProcessError(p.returncode, command)
+ self.log("Return code: %d" % p.returncode, level=return_level)
+ if halt_on_failure and return_level == ERROR:
+ self.return_code = fatal_exit_code
+ self.fatal("Halting on failure while running %s" % command,
+ exit_code=fatal_exit_code)
+ # Hm, options on how to return this? I bet often we'll want
+ # output_lines[0] with no newline.
+ if return_type != 'output':
+ return (tmp_stdout_filename, tmp_stderr_filename)
+ else:
+ return output
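+
+ # Usage sketch (command is illustrative):
+ #   rev = self.get_output_from_command(['hg', 'id', '-i'], cwd='src')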
+
+ def _touch_file(self, file_name, times=None, error_level=FATAL):
+ """touch a file.
+
+ Args:
+ file_name (str): name of the file to touch.
+ times (tuple, optional): 2-tuple as specified by `os.utime`_
+ Defaults to None.
+ error_level (str, optional): log level name in case of error.
+ Defaults to `FATAL`.
+
+ .. _`os.utime`:
+ https://docs.python.org/3.4/library/os.html?highlight=os.utime#os.utime
+ """
+ self.info("Touching: %s" % file_name)
+ try:
+ os.utime(file_name, times)
+ except OSError:
+ try:
+ open(file_name, 'w').close()
+ except IOError as e:
+ msg = "I/O error(%s): %s" % (e.errno, e.strerror)
+ self.log(msg, level=error_level)
+ os.utime(file_name, times)
+
+ def unpack(self, filename, extract_to, extract_dirs=None,
+ error_level=ERROR, fatal_exit_code=2, verbose=False):
+ """The method allows to extract a file regardless of its extension.
+
+ Args:
+ filename (str): filename of the compressed file.
+ extract_to (str): where to extract the compressed file.
+ extract_dirs (list, optional): directories inside the archive file to extract.
+ Defaults to `None`.
+ error_level (str, optional): log level to use in case an error occurs.
+ Defaults to `ERROR`.
+ fatal_exit_code (int, optional): exit code to use when an extraction
+ error is logged. Defaults to 2.
+ verbose (bool, optional): whether or not extracted content should be displayed.
+ Defaults to False.
+
+ Raises:
+ IOError: on `filename` file not found.
+
+ """
+ if not os.path.isfile(filename):
+ raise IOError('Could not find file to extract: %s' % filename)
+
+ if zipfile.is_zipfile(filename):
+ try:
+ self.info('Using ZipFile to extract {} to {}'.format(filename, extract_to))
+ with zipfile.ZipFile(filename) as bundle:
+ for entry in self._filter_entries(bundle.namelist(), extract_dirs):
+ if verbose:
+ self.info(' %s' % entry)
+ bundle.extract(entry, path=extract_to)
+
+ # ZipFile doesn't preserve permissions during extraction:
+ # http://bugs.python.org/issue15795
+ fname = os.path.realpath(os.path.join(extract_to, entry))
+ mode = bundle.getinfo(entry).external_attr >> 16 & 0x1FF
+ # Only set permissions if attributes are available. Otherwise all
+ # permissions will be removed eg. on Windows.
+ if mode:
+ os.chmod(fname, mode)
+ except zipfile.BadZipfile as e:
+ self.log('%s (%s)' % (e.message, filename),
+ level=error_level, exit_code=fatal_exit_code)
+
+ # Bug 1211882 - is_tarfile cannot be trusted for dmg files
+ elif tarfile.is_tarfile(filename) and not filename.lower().endswith('.dmg'):
+ try:
+ self.info('Using TarFile to extract {} to {}'.format(filename, extract_to))
+ with tarfile.open(filename) as bundle:
+ for entry in self._filter_entries(bundle.getnames(), extract_dirs):
+ if verbose:
+ self.info(' %s' % entry)
+ bundle.extract(entry, path=extract_to)
+ except tarfile.TarError as e:
+ self.log('%s (%s)' % (e.message, filename),
+ level=error_level, exit_code=fatal_exit_code)
+ else:
+ self.log('No extraction method found for: %s' % filename,
+ level=error_level, exit_code=fatal_exit_code)
+
+ def is_taskcluster(self):
+ """Returns boolean indicating if we're running in TaskCluster."""
+ # This may need expanding in the future to work on
+ return 'TASKCLUSTER_WORKER_TYPE' in os.environ
+
+
+def PreScriptRun(func):
+ """Decorator for methods that will be called before script execution.
+
+ Each method on a BaseScript having this decorator will be called at the
+ beginning of BaseScript.run().
+
+ The return value is ignored. Exceptions will abort execution.
+ """
+ func._pre_run_listener = True
+ return func
+
+
+def PostScriptRun(func):
+ """Decorator for methods that will be called after script execution.
+
+ This is similar to PreScriptRun except it is called at the end of
+ execution. The method will always be fired, even if execution fails.
+ """
+ func._post_run_listener = True
+ return func
+
+
+def PreScriptAction(action=None):
+ """Decorator for methods that will be called at the beginning of each action.
+
+ Each method on a BaseScript having this decorator will be called during
+ BaseScript.run() before an individual action is executed. The method will
+ receive the action's name as an argument.
+
+ If no values are passed to the decorator, it will be applied to every
+ action. If a string is passed, the decorated function will only be called
+ for the action of that name.
+
+ The return value of the method is ignored. Exceptions will abort execution.
+ """
+ def _wrapped(func):
+ func._pre_action_listener = action
+ return func
+
+ def _wrapped_none(func):
+ func._pre_action_listener = None
+ return func
+
+ if type(action) == type(_wrapped):
+ return _wrapped_none(action)
+
+ return _wrapped
+
+
+def PostScriptAction(action=None):
+ """Decorator for methods that will be called at the end of each action.
+
+ This behaves similarly to PreScriptAction. It varies in that it is called
+ after execution of the action.
+
+ The decorated method will receive the action name as a positional argument.
+ It will then receive the following named arguments:
+
+ success - Bool indicating whether the action finished successfully.
+
+ The decorated method will always be called, even if the action threw an
+ exception.
+
+ The return value is ignored.
+ """
+ def _wrapped(func):
+ func._post_action_listener = action
+ return func
+
+ def _wrapped_none(func):
+ func._post_action_listener = None
+ return func
+
+ if type(action) == type(_wrapped):
+ return _wrapped_none(action)
+
+ return _wrapped
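+
+# Usage sketch (class and action names are illustrative):
+#   class MyScript(BaseScript):
+#       @PreScriptAction('build')
+#       def _pre_build(self, action):
+#           self.info('about to build')
+#
+#       @PostScriptAction
+#       def _cleanup(self, action, success=None):
+#           self.info('%s finished; success: %s' % (action, success))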
+
+
+# BaseScript {{{1
+class BaseScript(ScriptMixin, LogMixin, object):
+ def __init__(self, config_options=None, ConfigClass=BaseConfig,
+ default_log_level="info", **kwargs):
+ self._return_code = 0
+ super(BaseScript, self).__init__()
+
+ # Collect decorated methods. We simply iterate over the attributes of
+ # the current class instance and look for signatures deposited by
+ # the decorators.
+ self._listeners = dict(
+ pre_run=[],
+ pre_action=[],
+ post_action=[],
+ post_run=[],
+ )
+ for k in dir(self):
+ item = getattr(self, k)
+
+ # We only decorate methods, so ignore other types.
+ if not inspect.ismethod(item):
+ continue
+
+ if hasattr(item, '_pre_run_listener'):
+ self._listeners['pre_run'].append(k)
+
+ if hasattr(item, '_pre_action_listener'):
+ self._listeners['pre_action'].append((
+ k,
+ item._pre_action_listener))
+
+ if hasattr(item, '_post_action_listener'):
+ self._listeners['post_action'].append((
+ k,
+ item._post_action_listener))
+
+ if hasattr(item, '_post_run_listener'):
+ self._listeners['post_run'].append(k)
+
+ self.log_obj = None
+ self.abs_dirs = None
+ if config_options is None:
+ config_options = []
+ self.summary_list = []
+ self.failures = []
+ rw_config = ConfigClass(config_options=config_options, **kwargs)
+ self.config = rw_config.get_read_only_config()
+ self.actions = tuple(rw_config.actions)
+ self.all_actions = tuple(rw_config.all_actions)
+ self.env = None
+ self.new_log_obj(default_log_level=default_log_level)
+ self.script_obj = self
+
+ # Indicate we're a source checkout if VCS directory is present at the
+ # appropriate place. This code will break if this file is ever moved
+ # to another directory.
+ self.topsrcdir = None
+
+ srcreldir = 'testing/mozharness/mozharness/base'
+ here = os.path.normpath(os.path.dirname(__file__))
+ if here.replace('\\', '/').endswith(srcreldir):
+ topsrcdir = os.path.normpath(os.path.join(here, '..', '..',
+ '..', '..'))
+ hg_dir = os.path.join(topsrcdir, '.hg')
+ git_dir = os.path.join(topsrcdir, '.git')
+ if os.path.isdir(hg_dir) or os.path.isdir(git_dir):
+ self.topsrcdir = topsrcdir
+
+ # Set self.config to read-only.
+ #
+ # We can create intermediate config info programmatically from
+ # this in a repeatable way, with logs; this is how we straddle the
+ # ideal-but-not-user-friendly static config and the
+ # easy-to-write-hard-to-debug writable config.
+ #
+ # To allow for other, script-specific configurations
+ # (e.g., buildbot props json parsing), before locking,
+ # call self._pre_config_lock(). If needed, this method can
+ # alter self.config.
+ self._pre_config_lock(rw_config)
+ self._config_lock()
+
+ self.info("Run as %s" % rw_config.command_line)
+ if self.config.get("dump_config_hierarchy"):
+ # we only wish to dump and display what self.config is made up of,
+ # against the current script + args, without actually running any
+ # actions
+ self._dump_config_hierarchy(rw_config.all_cfg_files_and_dicts)
+ if self.config.get("dump_config"):
+ self.dump_config(exit_on_finish=True)
+
+ def _dump_config_hierarchy(self, cfg_files):
+ """ interpret each config file used.
+
+ This will show which keys/values are being added or overwritten by
+ other config files depending on their hierarchy (when they were added).
+ """
+ # go through each config_file. We will start with the lowest and
+ # print its keys/values that are being used in self.config. If any
+ # keys/values are present in a config file with a higher precedence,
+ # ignore those.
+ dirs = self.query_abs_dirs()
+ cfg_files_dump_config = {} # we will dump this to file
+ # keep track of keys that did not come from a config file
+ keys_not_from_file = set(self.config.keys())
+ if not cfg_files:
+ cfg_files = []
+ self.info("Total config files: %d" % (len(cfg_files)))
+ if len(cfg_files):
+ self.info("cfg files used from lowest precedence to highest:")
+ for i, (target_file, target_dict) in enumerate(cfg_files):
+ unique_keys = set(target_dict.keys())
+ unique_dict = {}
+ # iterate through the target_dicts remaining 'higher' cfg_files
+ remaining_cfgs = cfg_files[i + 1:]
+ # where higher == more precedent
+ for ii, (higher_file, higher_dict) in enumerate(remaining_cfgs):
+ # now only keep keys/values that are not overwritten by a
+ # higher config
+ unique_keys = unique_keys.difference(set(higher_dict.keys()))
+ # unique_dict we know now has only keys/values that are unique to
+ # this config file.
+ unique_dict = dict(
+ (key, target_dict.get(key)) for key in unique_keys
+ )
+ cfg_files_dump_config[target_file] = unique_dict
+ self.action_message("Config File %d: %s" % (i + 1, target_file))
+ self.info(pprint.pformat(unique_dict))
+ # let's also find out which keys/values from self.config are not
+ # from each target config file dict
+ keys_not_from_file = keys_not_from_file.difference(
+ set(target_dict.keys())
+ )
+ not_from_file_dict = dict(
+ (key, self.config.get(key)) for key in keys_not_from_file
+ )
+ cfg_files_dump_config["not_from_cfg_file"] = not_from_file_dict
+ self.action_message("Not from any config file (default_config, "
+ "cmd line options, etc)")
+ self.info(pprint.pformat(not_from_file_dict))
+
+ # finally, let's dump this output as JSON and exit early
+ self.dump_config(
+ os.path.join(dirs['abs_log_dir'], "localconfigfiles.json"),
+ cfg_files_dump_config, console_output=False, exit_on_finish=True
+ )
+
+ def _pre_config_lock(self, rw_config):
+ """This empty method can allow for config checking and manipulation
+ before the config lock, when overridden in scripts.
+ """
+ pass
+
+ def _config_lock(self):
+ """After this point, the config is locked and should not be
+ manipulated (based on mozharness.base.config.ReadOnlyDict)
+ """
+ self.config.lock()
+
+ def _possibly_run_method(self, method_name, error_if_missing=False):
+ """This is here for run().
+ """
+ if hasattr(self, method_name) and callable(getattr(self, method_name)):
+ return getattr(self, method_name)()
+ elif error_if_missing:
+ self.error("No such method %s!" % method_name)
+
+ @PostScriptRun
+ def copy_logs_to_upload_dir(self):
+ """Copies logs to the upload directory"""
+ self.info("Copying logs to upload dir...")
+ log_files = ['localconfig.json']
+ for log_name in self.log_obj.log_files.keys():
+ log_files.append(self.log_obj.log_files[log_name])
+ dirs = self.query_abs_dirs()
+ for log_file in log_files:
+ self.copy_to_upload_dir(os.path.join(dirs['abs_log_dir'], log_file),
+ dest=os.path.join('logs', log_file),
+ short_desc='%s log' % log_name,
+ long_desc='%s log' % log_name,
+ max_backups=self.config.get("log_max_rotate", 0))
+
+ def run_action(self, action):
+ if action not in self.actions:
+ self.action_message("Skipping %s step." % action)
+ return
+
+ method_name = action.replace("-", "_")
+ self.action_message("Running %s step." % action)
+
+ # An exception during a pre action listener should abort execution.
+ for fn, target in self._listeners['pre_action']:
+ if target is not None and target != action:
+ continue
+
+ try:
+ self.info("Running pre-action listener: %s" % fn)
+ method = getattr(self, fn)
+ method(action)
+ except Exception:
+ self.error("Exception during pre-action for %s: %s" % (
+ action, traceback.format_exc()))
+
+ for fn, target in self._listeners['post_action']:
+ if target is not None and target != action:
+ continue
+
+ try:
+ self.info("Running post-action listener: %s" % fn)
+ method = getattr(self, fn)
+ method(action, success=False)
+ except Exception:
+ self.error("An additional exception occurred during "
+ "post-action for %s: %s" % (action,
+ traceback.format_exc()))
+
+ self.fatal("Aborting due to exception in pre-action listener.")
+
+ # We always run post action listeners, even if the main routine failed.
+ success = False
+ try:
+ self.info("Running main action method: %s" % method_name)
+ self._possibly_run_method("preflight_%s" % method_name)
+ self._possibly_run_method(method_name, error_if_missing=True)
+ self._possibly_run_method("postflight_%s" % method_name)
+ success = True
+ finally:
+ post_success = True
+ for fn, target in self._listeners['post_action']:
+ if target is not None and target != action:
+ continue
+
+ try:
+ self.info("Running post-action listener: %s" % fn)
+ method = getattr(self, fn)
+ method(action, success=success and self.return_code == 0)
+ except Exception:
+ post_success = False
+ self.error("Exception during post-action for %s: %s" % (
+ action, traceback.format_exc()))
+
+ step_result = 'success' if success else 'failed'
+ self.action_message("Finished %s step (%s)" % (action, step_result))
+
+ if not post_success:
+ self.fatal("Aborting due to failure in post-action listener.")
+
+ def run(self):
+ """Default run method.
+ This is the "do everything" method, based on actions and all_actions.
+
+ First run self.dump_config() if it exists.
+ Second, go through the list of all_actions.
+ If they're in the list of self.actions, try to run
+ self.preflight_ACTION(), self.ACTION(), and self.postflight_ACTION().
+
+ Preflight is sanity checking before doing anything time consuming or
+ destructive.
+
+ Postflight is quick testing for success after an action.
+
+ """
+ for fn in self._listeners['pre_run']:
+ try:
+ self.info("Running pre-run listener: %s" % fn)
+ method = getattr(self, fn)
+ method()
+ except Exception:
+ self.error("Exception during pre-run listener: %s" %
+ traceback.format_exc())
+
+ for fn in self._listeners['post_run']:
+ try:
+ method = getattr(self, fn)
+ method()
+ except Exception:
+ self.error("An additional exception occurred during a "
+ "post-run listener: %s" % traceback.format_exc())
+
+ self.fatal("Aborting due to failure in pre-run listener.")
+
+ self.dump_config()
+ try:
+ for action in self.all_actions:
+ self.run_action(action)
+ except Exception:
+ self.fatal("Uncaught exception: %s" % traceback.format_exc())
+ finally:
+ post_success = True
+ for fn in self._listeners['post_run']:
+ try:
+ self.info("Running post-run listener: %s" % fn)
+ method = getattr(self, fn)
+ method()
+ except Exception:
+ post_success = False
+ self.error("Exception during post-run listener: %s" %
+ traceback.format_exc())
+
+ if not post_success:
+ self.fatal("Aborting due to failure in post-run listener.")
+ if self.config.get("copy_logs_post_run", True):
+ self.copy_logs_to_upload_dir()
+
+ return self.return_code
+
+ def run_and_exit(self):
+ """Runs the script and exits the current interpreter."""
+ rc = self.run()
+ if rc != 0:
+ self.warning("returning nonzero exit status %d" % rc)
+ sys.exit(rc)
+
+ def clobber(self):
+ """
+ Delete the working directory
+ """
+ dirs = self.query_abs_dirs()
+ self.rmtree(dirs['abs_work_dir'], error_level=FATAL)
+
+ def query_abs_dirs(self):
+ """We want to be able to determine where all the important things
+ are. Absolute paths lend themselves well to this, though I wouldn't
+ be surprised if this causes some issues somewhere.
+
+ This should be overridden in any script that has additional dirs
+ to query.
+
+ The query_* methods tend to set self.VAR variables as their
+ runtime cache.
+ """
+ if self.abs_dirs:
+ return self.abs_dirs
+ c = self.config
+ dirs = {}
+ dirs['base_work_dir'] = c['base_work_dir']
+ dirs['abs_work_dir'] = os.path.join(c['base_work_dir'], c['work_dir'])
+ dirs['abs_upload_dir'] = os.path.join(dirs['abs_work_dir'], 'upload')
+ dirs['abs_log_dir'] = os.path.join(c['base_work_dir'], c.get('log_dir', 'logs'))
+ self.abs_dirs = dirs
+ return self.abs_dirs
+
+ def dump_config(self, file_path=None, config=None,
+ console_output=True, exit_on_finish=False):
+ """Dump self.config to localconfig.json
+ """
+ config = config or self.config
+ dirs = self.query_abs_dirs()
+ if not file_path:
+ file_path = os.path.join(dirs['abs_log_dir'], "localconfig.json")
+ self.info("Dumping config to %s." % file_path)
+ self.mkdir_p(os.path.dirname(file_path))
+ json_config = json.dumps(config, sort_keys=True, indent=4)
+ with codecs.open(file_path, encoding='utf-8', mode='w+') as fh:
+     fh.write(json_config)
+ if console_output:
+ self.info(pprint.pformat(config))
+ if exit_on_finish:
+ sys.exit()
+
+ # logging {{{2
+ def new_log_obj(self, default_log_level="info"):
+ c = self.config
+ log_dir = os.path.join(c['base_work_dir'], c.get('log_dir', 'logs'))
+ log_config = {
+ "logger_name": 'Simple',
+ "log_name": 'log',
+ "log_dir": log_dir,
+ "log_level": default_log_level,
+ "log_format": '%(asctime)s %(levelname)8s - %(message)s',
+ "log_to_console": True,
+ "append_to_log": False,
+ }
+ log_type = self.config.get("log_type", "multi")
+ for key in log_config.keys():
+ value = self.config.get(key, None)
+ if value is not None:
+ log_config[key] = value
+ if log_type == "multi":
+ self.log_obj = MultiFileLogger(**log_config)
+ else:
+ self.log_obj = SimpleFileLogger(**log_config)
+
+ def action_message(self, message):
+ self.info("[mozharness: %sZ] %s" % (
+ datetime.datetime.utcnow().isoformat(' '), message))
+
+ def summary(self):
+ """Print out all the summary lines added via add_summary()
+ throughout the script.
+
+ I'd like to revisit how to do this in a prettier fashion.
+ """
+ self.action_message("%s summary:" % self.__class__.__name__)
+ if self.summary_list:
+ for item in self.summary_list:
+ try:
+ self.log(item['message'], level=item['level'])
+ except ValueError:
+     # The log is closed; fall back to print. This can happen when
+     # summary() is called from __del__().
+     print "### Log is closed! (%s)" % item['message']
+
+ def add_summary(self, message, level=INFO):
+ self.summary_list.append({'message': message, 'level': level})
+ # TODO write to a summary-only log?
+ # Summaries need a lot more love.
+ self.log(message, level=level)
+
+ def add_failure(self, key, message="%(key)s failed.", level=ERROR,
+ increment_return_code=True):
+ if key not in self.failures:
+ self.failures.append(key)
+ self.add_summary(message % {'key': key}, level=level)
+ if increment_return_code:
+ self.return_code += 1
+
+ def query_failure(self, key):
+ return key in self.failures
+
+ def summarize_success_count(self, success_count, total_count,
+ message="%d of %d successful.",
+ level=None):
+ if level is None:
+ level = INFO
+ if success_count < total_count:
+ level = ERROR
+ self.add_summary(message % (success_count, total_count),
+ level=level)
+
+ def copy_to_upload_dir(self, target, dest=None, short_desc="unknown",
+ long_desc="unknown", log_level=DEBUG,
+ error_level=ERROR, max_backups=None,
+ compress=False, upload_dir=None):
+ """Copy target file to upload_dir/dest.
+
+ Potentially update a manifest in the future if we go that route.
+
+ Currently this only copies a single file; it would be nice to allow
+ recursive copying, probably via a helper
+ _copy_file_to_upload_dir().
+
+ short_desc and long_desc are placeholders for if/when we add
+ upload_dir manifests.
+ """
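+ # Hedged usage sketch (paths illustrative):
+ #     self.copy_to_upload_dir('logs/test.log', dest='logs/',
+ #                             compress=True, max_backups=3)
+ # A trailing '/' in dest means "copy into that subdirectory of
+ # upload_dir, keeping the original filename".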
+ dest_filename_given = dest is not None
+ if upload_dir is None:
+ upload_dir = self.query_abs_dirs()['abs_upload_dir']
+ if dest is None:
+ dest = os.path.basename(target)
+ if dest.endswith('/'):
+ dest_file = os.path.basename(target)
+ dest_dir = os.path.join(upload_dir, dest)
+ dest_filename_given = False
+ else:
+ dest_file = os.path.basename(dest)
+ dest_dir = os.path.join(upload_dir, os.path.dirname(dest))
+ if compress and not dest_filename_given:
+ dest_file += ".gz"
+ dest = os.path.join(dest_dir, dest_file)
+ if not os.path.exists(target):
+ self.log("%s doesn't exist!" % target, level=error_level)
+ return None
+ self.mkdir_p(dest_dir)
+ if os.path.exists(dest):
+ if os.path.isdir(dest):
+ self.log("%s exists and is a directory!" % dest, level=error_level)
+ return -1
+ if max_backups:
+ # Probably a better way to do this
+ oldest_backup = 0
+ backup_regex = re.compile(r"^%s\.(\d+)$" % re.escape(dest_file))
+ for filename in os.listdir(dest_dir):
+ r = backup_regex.match(filename)
+ if r and int(r.groups()[0]) > oldest_backup:
+ oldest_backup = int(r.groups()[0])
+ for backup_num in range(oldest_backup, 0, -1):
+ # TODO more error checking?
+ if backup_num >= max_backups:
+ self.rmtree(os.path.join(dest_dir, "%s.%d" % (dest_file, backup_num)),
+ log_level=log_level)
+ else:
+ self.move(os.path.join(dest_dir, "%s.%d" % (dest_file, backup_num)),
+ os.path.join(dest_dir, "%s.%d" % (dest_file, backup_num + 1)),
+ log_level=log_level)
+ if self.move(dest, "%s.1" % dest, log_level=log_level):
+ self.log("Unable to move %s!" % dest, level=error_level)
+ return -1
+ else:
+ if self.rmtree(dest, log_level=log_level):
+ self.log("Unable to remove %s!" % dest, level=error_level)
+ return -1
+ self.copyfile(target, dest, log_level=log_level, compress=compress)
+ if os.path.exists(dest):
+ return dest
+ else:
+ self.log("%s doesn't exist after copy!" % dest, level=error_level)
+ return None
+
+ def get_hash_for_file(self, file_path, hash_type="sha512"):
+ bs = 65536
+ hasher = hashlib.new(hash_type)
+ with open(file_path, 'rb') as fh:
+ buf = fh.read(bs)
+ while len(buf) > 0:
+ hasher.update(buf)
+ buf = fh.read(bs)
+ return hasher.hexdigest()
+
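+ # Hedged example (path illustrative):
+ #     digest = self.get_hash_for_file('dist/target.zip', hash_type='sha256')
+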
+ @property
+ def return_code(self):
+ return self._return_code
+
+ @return_code.setter
+ def return_code(self, code):
+ old_return_code, self._return_code = self._return_code, code
+ if old_return_code != code:
+ self.warning("setting return code to %d" % code)
+
+# __main__ {{{1
+if __name__ == '__main__':
+    pass
diff --git a/testing/mozharness/mozharness/base/signing.py b/testing/mozharness/mozharness/base/signing.py
new file mode 100755
index 000000000..d0fe05da2
--- /dev/null
+++ b/testing/mozharness/mozharness/base/signing.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic signing methods.
+"""
+
+import getpass
+import hashlib
+import os
+import re
+import subprocess
+
+from mozharness.base.errors import JarsignerErrorList, ZipErrorList, ZipalignErrorList
+from mozharness.base.log import OutputParser, IGNORE, DEBUG, INFO, ERROR, FATAL
+
+UnsignApkErrorList = [{
+ 'regex': re.compile(r'''zip warning: name not matched: '?META-INF/'''),
+ 'level': INFO,
+ 'explanation': r'''This apk is already unsigned.''',
+}, {
+ 'substr': r'''zip error: Nothing to do!''',
+ 'level': IGNORE,
+}] + ZipErrorList
+
+TestJarsignerErrorList = [{
+ "substr": "jarsigner: unable to open jar file:",
+ "level": IGNORE,
+}] + JarsignerErrorList
+
+
+# BaseSigningMixin {{{1
+class BaseSigningMixin(object):
+ """Generic signing helper methods.
+ """
+ def query_filesize(self, file_path):
+ self.info("Determining filesize for %s" % file_path)
+ length = os.path.getsize(file_path)
+ self.info(" %s" % str(length))
+ return length
+
+ # TODO this should be parallelized with the to-be-written BaseHelper!
+ def query_sha512sum(self, file_path):
+ self.info("Determining sha512sum for %s" % file_path)
+ m = hashlib.sha512()
+ contents = self.read_from_file(file_path, verbose=False,
+ open_mode='rb')
+ m.update(contents)
+ sha512 = m.hexdigest()
+ self.info(" %s" % sha512)
+ return sha512
+
+
+# AndroidSigningMixin {{{1
+class AndroidSigningMixin(object):
+ """
+ Generic Android apk signing methods.
+
+ Dependent on BaseScript.
+ """
+ # TODO port build/tools/release/signing/verify-android-signature.sh here
+
+ key_passphrase = os.environ.get('android_keypass')
+ store_passphrase = os.environ.get('android_storepass')
+
+ def passphrase(self):
+ if not self.store_passphrase:
+ self.store_passphrase = getpass.getpass("Store passphrase: ")
+ if not self.key_passphrase:
+ self.key_passphrase = getpass.getpass("Key passphrase: ")
+
+ def _verify_passphrases(self, keystore, key_alias, error_level=FATAL):
+ self.info("Verifying passphrases...")
+ status = self.sign_apk("NOTAREALAPK", keystore,
+ self.store_passphrase, self.key_passphrase,
+ key_alias, remove_signature=False,
+ log_level=DEBUG, error_level=DEBUG,
+ error_list=TestJarsignerErrorList)
+ if status == 0:
+ self.info("Passphrases are good.")
+ elif status < 0:
+ self.log("Encountered errors while trying to sign!",
+ level=error_level)
+ else:
+ self.log("Unable to verify passphrases!",
+ level=error_level)
+ return status
+
+ def verify_passphrases(self):
+ c = self.config
+ self._verify_passphrases(c['keystore'], c['key_alias'])
+
+ def postflight_passphrase(self):
+ self.verify_passphrases()
+
+ def sign_apk(self, apk, keystore, storepass, keypass, key_alias,
+ remove_signature=True, error_list=None,
+ log_level=INFO, error_level=ERROR):
+ """
+ Signs an apk with jarsigner.
+ """
+ jarsigner = self.query_exe('jarsigner')
+ if remove_signature:
+ status = self.unsign_apk(apk)
+ if status:
+ self.error("Can't remove signature in %s!" % apk)
+ return -1
+ if error_list is None:
+ error_list = JarsignerErrorList[:]
+ # This needs to run silently, so no run_command() or
+ # get_output_from_command() (though I could add a
+ # suppress_command_echo=True or something?)
+ self.log("(signing %s)" % apk, level=log_level)
+ try:
+ p = subprocess.Popen([jarsigner, "-keystore", keystore,
+ "-storepass", storepass,
+ "-keypass", keypass,
+ apk, key_alias],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ except OSError:
+ self.exception("Error while signing %s (missing %s?):" % (apk, jarsigner))
+ return -2
+ except ValueError:
+ self.exception("Popen called with invalid arguments during signing?")
+ return -3
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=error_list)
+ loop = True
+ while loop:
+ if p.poll() is not None:
+     # The process has exited; do one final read of stdout below so
+     # we don't lose the last lines of output.
+     loop = False
+ for line in p.stdout:
+ parser.add_lines(line)
+ if parser.num_errors:
+ self.log("(failure)", level=error_level)
+ else:
+ self.log("(success)", level=log_level)
+ return parser.num_errors
+
+ def unsign_apk(self, apk, **kwargs):
+ zip_bin = self.query_exe("zip")
+ return self.run_command([zip_bin, apk, '-d', 'META-INF/*'],
+ error_list=UnsignApkErrorList,
+ success_codes=[0, 12],
+ return_type='num_errors', **kwargs)
+
+ def align_apk(self, unaligned_apk, aligned_apk, error_level=ERROR):
+ """
+ Zipalign apk.
+ Returns None on success, -1 on failure.
+ """
+ dirs = self.query_abs_dirs()
+ zipalign = self.query_exe("zipalign")
+ if self.run_command([zipalign, '-f', '4',
+ unaligned_apk, aligned_apk],
+ return_type='num_errors',
+ cwd=dirs['abs_work_dir'],
+ error_list=ZipalignErrorList):
+ self.log("Unable to zipalign %s to %s!" % (unaligned_apk, aligned_apk), level=error_level)
+ return -1
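+
+ # A plausible end-to-end flow using the methods above (apk name is
+ # illustrative; keystore values normally come from self.config):
+ #     self.passphrase()  # prompt if not set via the environment
+ #     self.unsign_apk('gecko.apk')
+ #     self.sign_apk('gecko.apk', keystore, self.store_passphrase,
+ #                   self.key_passphrase, key_alias, remove_signature=False)
+ #     self.align_apk('gecko.apk', 'gecko-aligned.apk')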
diff --git a/testing/mozharness/mozharness/base/transfer.py b/testing/mozharness/mozharness/base/transfer.py
new file mode 100755
index 000000000..014c665a1
--- /dev/null
+++ b/testing/mozharness/mozharness/base/transfer.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic ways to upload + download files.
+"""
+
+import os
+import pprint
+import urllib2
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+from mozharness.base.errors import SSHErrorList
+from mozharness.base.log import DEBUG, ERROR
+
+
+# TransferMixin {{{1
+class TransferMixin(object):
+ """
+ Generic transfer methods.
+
+ Dependent on BaseScript.
+ """
+ def rsync_upload_directory(self, local_path, ssh_key, ssh_user,
+ remote_host, remote_path,
+ rsync_options=None,
+ error_level=ERROR,
+ create_remote_directory=True,
+ ):
+ """
+ Create a remote directory and upload the contents of
+ a local directory to it via rsync+ssh.
+
+ Returns:
+ None: on success
+ -1: if local_path is not a directory
+ -2: if the remote_directory cannot be created
+ (it only makes sense if create_remote_directory is True)
+ -3: rsync fails to copy to the remote directory
+ """
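+ # Hedged usage sketch (key, hosts and paths illustrative):
+ #     self.rsync_upload_directory(
+ #         local_path='upload', ssh_key='~/.ssh/id_rsa',
+ #         ssh_user='ffxbld', remote_host='upload.example.com',
+ #         remote_path='/pub/nightly')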
+ dirs = self.query_abs_dirs()
+ self.info("Uploading the contents of %s to %s:%s" % (local_path, remote_host, remote_path))
+ rsync = self.query_exe("rsync")
+ ssh = self.query_exe("ssh")
+ if rsync_options is None:
+ rsync_options = ['-azv']
+ if not os.path.isdir(local_path):
+ self.log("%s isn't a directory!" % local_path,
+ level=ERROR)
+ return -1
+ if create_remote_directory:
+ mkdir_error_list = [{
+ 'substr': r'''exists but is not a directory''',
+ 'level': ERROR
+ }] + SSHErrorList
+ if self.run_command([ssh, '-oIdentityFile=%s' % ssh_key,
+ '%s@%s' % (ssh_user, remote_host),
+ 'mkdir', '-p', remote_path],
+ cwd=dirs['abs_work_dir'],
+ return_type='num_errors',
+ error_list=mkdir_error_list):
+ self.log("Unable to create remote directory %s:%s!" % (remote_host, remote_path), level=error_level)
+ return -2
+ if self.run_command([rsync, '-e',
+ '%s -oIdentityFile=%s' % (ssh, ssh_key)
+ ] + rsync_options + ['.',
+ '%s@%s:%s/' % (ssh_user, remote_host, remote_path)],
+ cwd=local_path,
+ return_type='num_errors',
+ error_list=SSHErrorList):
+ self.log("Unable to rsync %s to %s:%s!" % (local_path, remote_host, remote_path), level=error_level)
+ return -3
+
+ def rsync_download_directory(self, ssh_key, ssh_user, remote_host,
+ remote_path, local_path,
+ rsync_options=None,
+ error_level=ERROR,
+ ):
+ """
+ rsync+ssh the content of a remote directory to local_path
+
+ Returns:
+ None: on success
+ -1: if local_path is not a directory
+ -3: rsync fails to download from the remote directory
+ """
+ self.info("Downloading the contents of %s:%s to %s" % (remote_host, remote_path, local_path))
+ rsync = self.query_exe("rsync")
+ ssh = self.query_exe("ssh")
+ if rsync_options is None:
+ rsync_options = ['-azv']
+ if not os.path.isdir(local_path):
+ self.log("%s isn't a directory!" % local_path,
+ level=error_level)
+ return -1
+ if self.run_command([rsync, '-e',
+ '%s -oIdentityFile=%s' % (ssh, ssh_key)
+ ] + rsync_options + [
+ '%s@%s:%s/' % (ssh_user, remote_host, remote_path),
+ '.'],
+ cwd=local_path,
+ return_type='num_errors',
+ error_list=SSHErrorList):
+ self.log("Unable to rsync %s:%s to %s!" % (remote_host, remote_path, local_path), level=error_level)
+ return -3
+
+ def load_json_from_url(self, url, timeout=30, log_level=DEBUG):
+ self.log("Attempting to download %s; timeout=%i" % (url, timeout),
+ level=log_level)
+ try:
+ r = urllib2.urlopen(url, timeout=timeout)
+ j = json.load(r)
+ self.log(pprint.pformat(j), level=log_level)
+ except Exception:
+ self.exception(message="Unable to download %s!" % url)
+ raise
+ return j
diff --git a/testing/mozharness/mozharness/base/vcs/__init__.py b/testing/mozharness/mozharness/base/vcs/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/base/vcs/__init__.py
diff --git a/testing/mozharness/mozharness/base/vcs/gittool.py b/testing/mozharness/mozharness/base/vcs/gittool.py
new file mode 100644
index 000000000..d6c609ea0
--- /dev/null
+++ b/testing/mozharness/mozharness/base/vcs/gittool.py
@@ -0,0 +1,95 @@
+import os
+import re
+import urlparse
+
+from mozharness.base.script import ScriptMixin
+from mozharness.base.log import LogMixin, OutputParser
+from mozharness.base.errors import GitErrorList, VCSException
+
+
+class GittoolParser(OutputParser):
+ """
+ A class that extends OutputParser such that it can find the "Got revision"
+ string from gittool.py output
+ """
+
+ got_revision_exp = re.compile(r'Got revision (\w+)')
+ got_revision = None
+
+ def parse_single_line(self, line):
+ m = self.got_revision_exp.match(line)
+ if m:
+ self.got_revision = m.group(1)
+ super(GittoolParser, self).parse_single_line(line)
+
+
+class GittoolVCS(ScriptMixin, LogMixin):
+ def __init__(self, log_obj=None, config=None, vcs_config=None,
+ script_obj=None):
+ super(GittoolVCS, self).__init__()
+
+ self.log_obj = log_obj
+ self.script_obj = script_obj
+ if config:
+ self.config = config
+ else:
+ self.config = {}
+ # vcs_config = {
+ # repo: repository,
+ # branch: branch,
+ # revision: revision,
+ # ssh_username: ssh_username,
+ # ssh_key: ssh_key,
+ # }
+ self.vcs_config = vcs_config
+ self.gittool = self.query_exe('gittool.py', return_type='list')
+
+ def ensure_repo_and_revision(self):
+     """Makes sure that `dest` has `revision` or `branch` checked out
+     from `repo`.
+
+     Do what it takes to make that happen, including possibly
+     clobbering dest.
+     """
+ c = self.vcs_config
+ for conf_item in ('dest', 'repo'):
+ assert self.vcs_config[conf_item]
+ dest = os.path.abspath(c['dest'])
+ repo = c['repo']
+ revision = c.get('revision')
+ branch = c.get('branch')
+ clean = c.get('clean')
+ share_base = c.get('vcs_share_base', os.environ.get("GIT_SHARE_BASE_DIR", None))
+ env = {'PATH': os.environ.get('PATH')}
+ env.update(c.get('env', {}))
+ if self._is_windows():
+ # git.exe is not in the PATH by default
+ env['PATH'] = '%s;C:/mozilla-build/Git/bin' % env['PATH']
+ # SYSTEMROOT is needed for 'import random'
+ if 'SYSTEMROOT' not in env:
+ env['SYSTEMROOT'] = os.environ.get('SYSTEMROOT')
+ if share_base is not None:
+ env['GIT_SHARE_BASE_DIR'] = share_base
+
+ cmd = self.gittool[:]
+ if branch:
+ cmd.extend(['-b', branch])
+ if revision:
+ cmd.extend(['-r', revision])
+ if clean:
+ cmd.append('--clean')
+
+ for base_mirror_url in self.config.get('gittool_base_mirror_urls', self.config.get('vcs_base_mirror_urls', [])):
+ bits = urlparse.urlparse(repo)
+ mirror_url = urlparse.urljoin(base_mirror_url, bits.path)
+ cmd.extend(['--mirror', mirror_url])
+
+ cmd.extend([repo, dest])
+ parser = GittoolParser(config=self.config, log_obj=self.log_obj,
+ error_list=GitErrorList)
+ retval = self.run_command(cmd, error_list=GitErrorList, env=env, output_parser=parser)
+
+ if retval != 0:
+ raise VCSException("Unable to checkout")
+
+ return parser.got_revision
diff --git a/testing/mozharness/mozharness/base/vcs/mercurial.py b/testing/mozharness/mozharness/base/vcs/mercurial.py
new file mode 100755
index 000000000..71e5e3ea0
--- /dev/null
+++ b/testing/mozharness/mozharness/base/vcs/mercurial.py
@@ -0,0 +1,497 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Mercurial VCS support.
+"""
+
+import os
+import re
+import subprocess
+from collections import namedtuple
+from urlparse import urlsplit
+import hashlib
+
+import sys
+sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.dirname(sys.path[0]))))
+
+import mozharness
+from mozharness.base.errors import HgErrorList, VCSException
+from mozharness.base.log import LogMixin, OutputParser
+from mozharness.base.script import ScriptMixin
+from mozharness.base.transfer import TransferMixin
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+
+HG_OPTIONS = ['--config', 'ui.merge=internal:merge']
+
+# MercurialVCS {{{1
+# TODO Make the remaining functions more mozharness-friendly.
+# TODO Add the various tag functionality that are currently in
+# build/tools/scripts to MercurialVCS -- generic tagging logic belongs here.
+REVISION, BRANCH = 0, 1
+
+
+class RepositoryUpdateRevisionParser(OutputParser):
+    """Parse robustcheckout output for the revision the working
+    directory was updated to."""
+ revision = None
+ RE_UPDATED = re.compile('^updated to ([a-f0-9]{40})$')
+
+ def parse_single_line(self, line):
+ m = self.RE_UPDATED.match(line)
+ if m:
+ self.revision = m.group(1)
+
+ return super(RepositoryUpdateRevisionParser, self).parse_single_line(line)
+
+
+def make_hg_url(hg_host, repo_path, protocol='http', revision=None,
+ filename=None):
+ """Helper function.
+
+ Construct a valid hg url from a base hg host (e.g. hg.mozilla.org),
+ repo_path, and an optional revision and filename.
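+
+ Illustrative results (values assumed):
+
+     make_hg_url('hg.mozilla.org', 'mozilla-central')
+         -> 'http://hg.mozilla.org/mozilla-central'
+     make_hg_url('hg.mozilla.org', 'mozilla-central', revision='abc123')
+         -> 'http://hg.mozilla.org/mozilla-central/rev/abc123'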
+ """
+ base = '%s://%s' % (protocol, hg_host)
+ repo = '/'.join(p.strip('/') for p in [base, repo_path])
+ if not filename:
+ if not revision:
+ return repo
+ else:
+ return '/'.join([p.strip('/') for p in [repo, 'rev', revision]])
+ else:
+ assert revision
+ return '/'.join([p.strip('/') for p in [repo, 'raw-file', revision, filename]])
+
+
+class MercurialVCS(ScriptMixin, LogMixin, TransferMixin):
+ # For the most part, scripts import mercurial and update.
+ # tag-release.py imports apply_and_push, update, get_revision, out,
+ # BRANCH, REVISION, get_branches, and cleanOutgoingRevs.
+
+ def __init__(self, log_obj=None, config=None, vcs_config=None,
+ script_obj=None):
+ super(MercurialVCS, self).__init__()
+ self.can_share = None
+ self.log_obj = log_obj
+ self.script_obj = script_obj
+ if config:
+ self.config = config
+ else:
+ self.config = {}
+ # vcs_config = {
+ # hg_host: hg_host,
+ # repo: repository,
+ # branch: branch,
+ # revision: revision,
+ # ssh_username: ssh_username,
+ # ssh_key: ssh_key,
+ # }
+ self.vcs_config = vcs_config or {}
+ self.hg = self.query_exe("hg", return_type="list") + HG_OPTIONS
+
+ def _make_absolute(self, repo):
+ if repo.startswith("file://"):
+ path = repo[len("file://"):]
+ repo = "file://%s" % os.path.abspath(path)
+ elif "://" not in repo:
+ repo = os.path.abspath(repo)
+ return repo
+
+ def get_repo_name(self, repo):
+ return repo.rstrip('/').split('/')[-1]
+
+ def get_repo_path(self, repo):
+ repo = self._make_absolute(repo)
+ if repo.startswith("/"):
+ return repo.lstrip("/")
+ else:
+ return urlsplit(repo).path.lstrip("/")
+
+ def get_revision_from_path(self, path):
+ """Returns which revision directory `path` currently has checked out."""
+ return self.get_output_from_command(
+ self.hg + ['parent', '--template', '{node}'], cwd=path
+ )
+
+ def get_branch_from_path(self, path):
+ branch = self.get_output_from_command(self.hg + ['branch'], cwd=path)
+ return str(branch).strip()
+
+ def get_branches_from_path(self, path):
+ branches = []
+ for line in self.get_output_from_command(self.hg + ['branches', '-c'],
+ cwd=path).splitlines():
+ branches.append(line.split()[0])
+ return branches
+
+ def hg_ver(self):
+ """Returns the current version of hg, as a tuple of
+ (major, minor, build)"""
+ ver_string = self.get_output_from_command(self.hg + ['-q', 'version'])
+ match = re.search(r"\(version ([0-9.]+)\)", ver_string)
+ if match:
+     bits = match.group(1).split(".")
+     while len(bits) < 3:
+         bits.append(0)
+     ver = tuple(int(b) for b in bits)
+ else:
+ ver = (0, 0, 0)
+ self.debug("Running hg version %s" % str(ver))
+ return ver
+
+ def update(self, dest, branch=None, revision=None):
+ """Updates working copy `dest` to `branch` or `revision`.
+ If revision is set, branch will be ignored.
+ If neither is set then the working copy will be updated to the
+ latest revision on the current branch. Local changes will be
+ discarded.
+ """
+ # If we have a revision, switch to that
+ msg = "Updating %s" % dest
+ if branch:
+ msg += " to branch %s" % branch
+ if revision:
+ msg += " revision %s" % revision
+ self.info("%s." % msg)
+ if revision is not None:
+ cmd = self.hg + ['update', '-C', '-r', revision]
+ if self.run_command(cmd, cwd=dest, error_list=HgErrorList):
+ raise VCSException("Unable to update %s to %s!" % (dest, revision))
+ else:
+ # Check & switch branch
+ local_branch = self.get_branch_from_path(dest)
+
+ cmd = self.hg + ['update', '-C']
+
+ # If this is different, checkout the other branch
+ if branch and branch != local_branch:
+ cmd.append(branch)
+
+ if self.run_command(cmd, cwd=dest, error_list=HgErrorList):
+ raise VCSException("Unable to update %s!" % dest)
+ return self.get_revision_from_path(dest)
+
+ def clone(self, repo, dest, branch=None, revision=None, update_dest=True):
+ """Clones hg repo and places it at `dest`, replacing whatever else
+ is there. The working copy will be empty.
+
+ If `revision` is set, only the specified revision and its ancestors
+ will be cloned. If revision is set, branch is ignored.
+
+ If `update_dest` is set, then `dest` will be updated to `revision`
+ if set, otherwise to `branch`, otherwise to the head of default.
+ """
+ msg = "Cloning %s to %s" % (repo, dest)
+ if branch:
+ msg += " on branch %s" % branch
+ if revision:
+ msg += " to revision %s" % revision
+ self.info("%s." % msg)
+ parent_dest = os.path.dirname(dest)
+ if parent_dest and not os.path.exists(parent_dest):
+ self.mkdir_p(parent_dest)
+ if os.path.exists(dest):
+ self.info("Removing %s before clone." % dest)
+ self.rmtree(dest)
+
+ cmd = self.hg + ['clone']
+ if not update_dest:
+ cmd.append('-U')
+
+ if revision:
+ cmd.extend(['-r', revision])
+ elif branch:
+ # hg >= 1.6 supports -b branch for cloning
+ ver = self.hg_ver()
+ if ver >= (1, 6, 0):
+ cmd.extend(['-b', branch])
+
+ cmd.extend([repo, dest])
+ output_timeout = self.config.get("vcs_output_timeout",
+ self.vcs_config.get("output_timeout"))
+ if self.run_command(cmd, error_list=HgErrorList,
+ output_timeout=output_timeout) != 0:
+ raise VCSException("Unable to clone %s to %s!" % (repo, dest))
+
+ if update_dest:
+ return self.update(dest, branch, revision)
+
+ def common_args(self, revision=None, branch=None, ssh_username=None,
+ ssh_key=None):
+ """Fill in common hg arguments, encapsulating logic checks that
+ depend on mercurial versions and provided arguments
+ """
+ args = []
+ if ssh_username or ssh_key:
+ opt = ['-e', 'ssh']
+ if ssh_username:
+ opt[1] += ' -l %s' % ssh_username
+ if ssh_key:
+ opt[1] += ' -i %s' % ssh_key
+ args.extend(opt)
+ if revision:
+ args.extend(['-r', revision])
+ elif branch:
+ if self.hg_ver() >= (1, 6, 0):
+ args.extend(['-b', branch])
+ return args
+
+ def pull(self, repo, dest, update_dest=True, **kwargs):
+ """Pulls changes from hg repo and places it in `dest`.
+
+ If `revision` is set, only the specified revision and its ancestors
+ will be pulled.
+
+ If `update_dest` is set, then `dest` will be updated to `revision`
+ if set, otherwise to `branch`, otherwise to the head of default.
+ """
+ msg = "Pulling %s to %s" % (repo, dest)
+ if update_dest:
+ msg += " and updating"
+ self.info("%s." % msg)
+ if not os.path.exists(dest):
+ # Error or clone?
+ # If error, should we have a halt_on_error=False above?
+ self.error("Can't hg pull in nonexistent directory %s." % dest)
+ return -1
+ # Convert repo to an absolute path if it's a local repository
+ repo = self._make_absolute(repo)
+ cmd = self.hg + ['pull']
+ cmd.extend(self.common_args(**kwargs))
+ cmd.append(repo)
+ output_timeout = self.config.get("vcs_output_timeout",
+ self.vcs_config.get("output_timeout"))
+ if self.run_command(cmd, cwd=dest, error_list=HgErrorList,
+ output_timeout=output_timeout) != 0:
+ raise VCSException("Can't pull in %s!" % dest)
+
+ if update_dest:
+ branch = self.vcs_config.get('branch')
+ revision = self.vcs_config.get('revision')
+ return self.update(dest, branch=branch, revision=revision)
+
+ # REVISION and BRANCH, defined above, index the (rev, branch) tuples returned by out().
+
+ def out(self, src, remote, **kwargs):
+ """Check for outgoing changesets present in a repo"""
+ self.info("Checking for outgoing changesets from %s to %s." % (src, remote))
+ cmd = self.hg + ['-q', 'out', '--template', '{node} {branches}\n']
+ cmd.extend(self.common_args(**kwargs))
+ cmd.append(remote)
+ if os.path.exists(src):
+ try:
+ revs = []
+ for line in self.get_output_from_command(cmd, cwd=src, throw_exception=True).rstrip().split("\n"):
+ try:
+ rev, branch = line.split()
+ # Mercurial displays no branch at all if the revision
+ # is on "default"
+ except ValueError:
+ rev = line.rstrip()
+ branch = "default"
+ revs.append((rev, branch))
+ return revs
+ except subprocess.CalledProcessError, inst:
+ # In some situations, some versions of Mercurial return "1"
+ # if no changes are found, so we need to ignore this return
+ # code
+ if inst.returncode == 1:
+ return []
+ raise
+
+ def push(self, src, remote, push_new_branches=True, **kwargs):
+ # This doesn't appear to work with hg_ver < (1, 6, 0).
+ # Error out, or let you try?
+ self.info("Pushing new changes from %s to %s." % (src, remote))
+ cmd = self.hg + ['push']
+ cmd.extend(self.common_args(**kwargs))
+ if push_new_branches and self.hg_ver() >= (1, 6, 0):
+ cmd.append('--new-branch')
+ cmd.append(remote)
+ status = self.run_command(cmd, cwd=src, error_list=HgErrorList, success_codes=(0, 1),
+ return_type="num_errors")
+ if status:
+ raise VCSException("Can't push %s to %s!" % (src, remote))
+ return status
+
+ @property
+ def robustcheckout_path(self):
+ """Path to the robustcheckout extension."""
+ ext = os.path.join(external_tools_path, 'robustcheckout.py')
+ if os.path.exists(ext):
+ return ext
+
+ def ensure_repo_and_revision(self):
+     """Makes sure that `dest` has `revision` or `branch` checked out
+     from `repo`.
+
+     Do what it takes to make that happen, including possibly
+     clobbering dest.
+     """
+ c = self.vcs_config
+ dest = c['dest']
+ repo_url = c['repo']
+ rev = c.get('revision')
+ branch = c.get('branch')
+ purge = c.get('clone_with_purge', False)
+ upstream = c.get('clone_upstream_url')
+
+ # The API here is kind of bad because we're relying on state in
+ # self.vcs_config instead of passing arguments. This confuses
+ # scripts that have multiple repos. This includes the clone_tools()
+ # step :(
+
+ if not rev and not branch:
+ self.warning('did not specify revision or branch; assuming "default"')
+ branch = 'default'
+
+ share_base = c.get('vcs_share_base') or os.environ.get('HG_SHARE_BASE_DIR')
+ if share_base and c.get('use_vcs_unique_share'):
+ # Bug 1277041 - update migration scripts to support robustcheckout
+ # fake a share but don't really share
+ share_base = os.path.join(share_base, hashlib.md5(dest).hexdigest())
+
+ # We require shared storage is configured because it guarantees we
+ # only have 1 local copy of logical repo stores.
+ if not share_base:
+ raise VCSException('vcs share base not defined; '
+ 'refusing to operate sub-optimally')
+
+ if not self.robustcheckout_path:
+ raise VCSException('could not find the robustcheckout Mercurial extension')
+
+ # Log HG version and install info to aid debugging.
+ self.run_command(self.hg + ['--version'])
+ self.run_command(self.hg + ['debuginstall'])
+
+ args = self.hg + [
+ '--config', 'extensions.robustcheckout=%s' % self.robustcheckout_path,
+ 'robustcheckout', repo_url, dest, '--sharebase', share_base,
+ ]
+ if purge:
+ args.append('--purge')
+ if upstream:
+ args.extend(['--upstream', upstream])
+
+ if rev:
+ args.extend(['--revision', rev])
+ if branch:
+ args.extend(['--branch', branch])
+
+ parser = RepositoryUpdateRevisionParser(config=self.config,
+ log_obj=self.log_obj)
+ if self.run_command(args, output_parser=parser):
+ raise VCSException('repo checkout failed!')
+
+ if not parser.revision:
+ raise VCSException('could not identify revision updated to')
+
+ return parser.revision
+
+ def apply_and_push(self, localrepo, remote, changer, max_attempts=10,
+ ssh_username=None, ssh_key=None):
+ """This function calls `changer' to make changes to the repo, and
+ tries its hardest to get them to the origin repo. `changer' must be
+ a callable object that receives two arguments: the directory of the
+ local repository, and the attempt number. This function will push
+ ALL changesets missing from remote.
+ """
+ self.info("Applying and pushing local changes from %s to %s." % (localrepo, remote))
+ assert callable(changer)
+ branch = self.get_branch_from_path(localrepo)
+ changer(localrepo, 1)
+ for n in range(1, max_attempts + 1):
+ try:
+ new_revs = self.out(src=localrepo, remote=remote,
+ ssh_username=ssh_username,
+ ssh_key=ssh_key)
+ if len(new_revs) < 1:
+ raise VCSException("No revs to push")
+ self.push(src=localrepo, remote=remote,
+ ssh_username=ssh_username,
+ ssh_key=ssh_key)
+ return
+ except VCSException, e:
+ self.debug("Hit error when trying to push: %s" % str(e))
+ if n == max_attempts:
+ self.debug("Tried %d times, giving up" % max_attempts)
+ for r in reversed(new_revs):
+ self.run_command(self.hg + ['strip', '-n', r[REVISION]],
+ cwd=localrepo, error_list=HgErrorList)
+ raise VCSException("Failed to push")
+ self.pull(remote, localrepo, update_dest=False,
+ ssh_username=ssh_username, ssh_key=ssh_key)
+ # After we successfully rebase or strip away heads, the push
+ # is attempted again at the start of the loop.
+ try:
+ self.run_command(self.hg + ['rebase'], cwd=localrepo,
+ error_list=HgErrorList,
+ throw_exception=True)
+ except subprocess.CalledProcessError, e:
+ self.debug("Failed to rebase: %s" % str(e))
+ # clean up any hanging rebase. ignore errors if we aren't
+ # in the middle of a rebase.
+ self.run_command(self.hg + ['rebase', '--abort'],
+ cwd=localrepo, success_codes=[0, 255])
+ self.update(localrepo, branch=branch)
+ for r in reversed(new_revs):
+ self.run_command(self.hg + ['strip', '-n', r[REVISION]],
+ cwd=localrepo, error_list=HgErrorList)
+ changer(localrepo, n + 1)
+
+ def cleanOutgoingRevs(self, reponame, remote, username, sshKey):
+ # TODO retry
+ self.info("Wiping outgoing local changes from %s to %s." % (reponame, remote))
+ outgoingRevs = self.out(src=reponame, remote=remote,
+ ssh_username=username, ssh_key=sshKey)
+ for r in reversed(outgoingRevs):
+ self.run_command(self.hg + ['strip', '-n', r[REVISION]],
+ cwd=reponame, error_list=HgErrorList)
+
+ def query_pushinfo(self, repository, revision):
+ """Query the pushdate and pushid of a repository/revision.
+ This is intended to be used on hg.mozilla.org/mozilla-central and
+ similar. It may or may not work for other hg repositories.
+ """
+ PushInfo = namedtuple('PushInfo', ['pushid', 'pushdate'])
+
+ try:
+ url = '%s/json-pushes?changeset=%s' % (repository, revision)
+ self.info('Pushdate URL is: %s' % url)
+ contents = self.retry(self.load_json_from_url, args=(url,))
+
+ # The contents should be something like:
+ # {
+ # "28537": {
+ # "changesets": [
+ # "1d0a914ae676cc5ed203cdc05c16d8e0c22af7e5",
+ # ],
+ # "date": 1428072488,
+ # "user": "user@mozilla.com"
+ # }
+ # }
+ #
+ # So we grab the first element ("28537" in this case) and then pull
+ # out the 'date' field.
+ pushid = contents.iterkeys().next()
+ self.info('Pushid is: %s' % pushid)
+ pushdate = contents[pushid]['date']
+ self.info('Pushdate is: %s' % pushdate)
+ return PushInfo(pushid, pushdate)
+
+ except Exception:
+ self.exception("Failed to get push info from hg.mozilla.org")
+ raise
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ pass
diff --git a/testing/mozharness/mozharness/base/vcs/tcvcs.py b/testing/mozharness/mozharness/base/vcs/tcvcs.py
new file mode 100644
index 000000000..55fca4afd
--- /dev/null
+++ b/testing/mozharness/mozharness/base/vcs/tcvcs.py
@@ -0,0 +1,49 @@
+import os.path
+from mozharness.base.script import ScriptMixin
+from mozharness.base.log import LogMixin
+
+class TcVCS(ScriptMixin, LogMixin):
+ def __init__(self, log_obj=None, config=None, vcs_config=None,
+ script_obj=None):
+ super(TcVCS, self).__init__()
+
+ self.log_obj = log_obj
+ self.script_obj = script_obj
+ if config:
+ self.config = config
+ else:
+ self.config = {}
+ # vcs_config = {
+ # repo: repository,
+ # branch: branch,
+ # revision: revision,
+ # ssh_username: ssh_username,
+ # ssh_key: ssh_key,
+ # }
+ self.vcs_config = vcs_config
+ self.tc_vcs = self.query_exe('tc-vcs', return_type='list')
+
+ def ensure_repo_and_revision(self):
+     """Makes sure that `dest` has `revision` or `branch` checked out
+     from `repo`.
+
+     Do what it takes to make that happen, including possibly
+     clobbering dest.
+     """
+ c = self.vcs_config
+ for conf_item in ('dest', 'repo'):
+ assert self.vcs_config[conf_item]
+
+ dest = os.path.abspath(c['dest'])
+ repo = c['repo']
+ branch = c.get('branch', '')
+ revision = c.get('revision', '')
+ if revision is None:
+ revision = ''
+ base_repo = self.config.get('base_repo', repo)
+
+ cmd = [self.tc_vcs[0], 'checkout', dest, base_repo, repo, revision, branch]
+ self.run_command(cmd)
+
+ cmd = [self.tc_vcs[0], 'revision', dest]
+ return self.get_output_from_command(cmd)
diff --git a/testing/mozharness/mozharness/base/vcs/vcsbase.py b/testing/mozharness/mozharness/base/vcs/vcsbase.py
new file mode 100755
index 000000000..60ba5b79c
--- /dev/null
+++ b/testing/mozharness/mozharness/base/vcs/vcsbase.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic VCS support.
+"""
+
+from copy import deepcopy
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.dirname(sys.path[0]))))
+
+from mozharness.base.errors import VCSException
+from mozharness.base.log import FATAL
+from mozharness.base.script import BaseScript
+from mozharness.base.vcs.mercurial import MercurialVCS
+from mozharness.base.vcs.gittool import GittoolVCS
+from mozharness.base.vcs.tcvcs import TcVCS
+
+# Update this with supported VCS name : VCS object
+VCS_DICT = {
+ 'hg': MercurialVCS,
+ 'gittool': GittoolVCS,
+ 'tc-vcs': TcVCS,
+}
+
+
+# VCSMixin {{{1
+class VCSMixin(object):
+ """Basic VCS methods that are vcs-agnostic.
+ The vcs_class handles all the vcs-specific tasks.
+ """
+ def query_dest(self, kwargs):
+ if 'dest' in kwargs:
+ return kwargs['dest']
+ dest = os.path.basename(kwargs['repo'])
+ # Strip a trailing ".git" (common for git repo urls)
+ if dest.endswith('.git'):
+     dest = dest[:-len('.git')]
+ return dest
+
+ def _get_revision(self, vcs_obj, dest):
+ try:
+ got_revision = vcs_obj.ensure_repo_and_revision()
+ if got_revision:
+ return got_revision
+ except VCSException:
+ self.rmtree(dest)
+ raise
+
+ def _get_vcs_class(self, vcs):
+ vcs = vcs or self.config.get('default_vcs', getattr(self, 'default_vcs', None))
+ vcs_class = VCS_DICT.get(vcs)
+ return vcs_class
+
+ def vcs_checkout(self, vcs=None, error_level=FATAL, **kwargs):
+ """ Check out a single repo.
+ """
+ c = self.config
+ vcs_class = self._get_vcs_class(vcs)
+ if not vcs_class:
+ self.error("Running vcs_checkout with kwargs %s" % str(kwargs))
+ raise VCSException("No VCS set!")
+ # need a better way to do this.
+ if 'dest' not in kwargs:
+ kwargs['dest'] = self.query_dest(kwargs)
+ if 'vcs_share_base' not in kwargs:
+ kwargs['vcs_share_base'] = c.get('%s_share_base' % vcs, c.get('vcs_share_base'))
+ vcs_obj = vcs_class(
+ log_obj=self.log_obj,
+ config=self.config,
+ vcs_config=kwargs,
+ script_obj=self,
+ )
+ return self.retry(
+ self._get_revision,
+ error_level=error_level,
+ error_message="Automation Error: Can't checkout %s!" % kwargs['repo'],
+ args=(vcs_obj, kwargs['dest']),
+ )
+
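+ # Hedged usage from a script built on this mixin (repo and revision
+ # illustrative):
+ #     rev = self.vcs_checkout(
+ #         vcs='hg',
+ #         repo='https://hg.mozilla.org/mozilla-central',
+ #         dest='src',
+ #         revision='abc123def456',
+ #     )
+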
+ def vcs_checkout_repos(self, repo_list, parent_dir=None,
+ tag_override=None, **kwargs):
+ """Check out a list of repos.
+ """
+ orig_dir = os.getcwd()
+ c = self.config
+ if not parent_dir:
+ parent_dir = os.path.join(c['base_work_dir'], c['work_dir'])
+ self.mkdir_p(parent_dir)
+ self.chdir(parent_dir)
+ revision_dict = {}
+ kwargs_orig = deepcopy(kwargs)
+ for repo_dict in repo_list:
+ kwargs = deepcopy(kwargs_orig)
+ kwargs.update(repo_dict)
+ if tag_override:
+ kwargs['branch'] = tag_override
+ dest = self.query_dest(kwargs)
+ revision_dict[dest] = {'repo': kwargs['repo']}
+ revision_dict[dest]['revision'] = self.vcs_checkout(**kwargs)
+ self.chdir(orig_dir)
+ return revision_dict
+
+ def vcs_query_pushinfo(self, repository, revision, vcs=None):
+ """Query the pushid/pushdate of a repository/revision
+ Returns a namedtuple with "pushid" and "pushdate" elements
+ """
+ vcs_class = self._get_vcs_class(vcs)
+ if not vcs_class:
+ raise VCSException("No VCS set in vcs_query_pushinfo!")
+ vcs_obj = vcs_class(
+ log_obj=self.log_obj,
+ config=self.config,
+ script_obj=self,
+ )
+ return vcs_obj.query_pushinfo(repository, revision)
+
+
+class VCSScript(VCSMixin, BaseScript):
+ def __init__(self, **kwargs):
+ super(VCSScript, self).__init__(**kwargs)
+
+ def pull(self, repos=None, parent_dir=None):
+ repos = repos or self.config.get('repos')
+ if not repos:
+ self.info("Pull has nothing to do!")
+ return
+ dirs = self.query_abs_dirs()
+ parent_dir = parent_dir or dirs['abs_work_dir']
+ return self.vcs_checkout_repos(repos,
+ parent_dir=parent_dir)
+
+
+# Specific VCS stubs {{{1
+# For ease of use.
+# This is here instead of mercurial.py because importing MercurialVCS into
+# vcsbase from mercurial, and importing VCSScript into mercurial from
+# vcsbase, was giving me issues.
+class MercurialScript(VCSScript):
+ default_vcs = 'hg'
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ pass
diff --git a/testing/mozharness/mozharness/base/vcs/vcssync.py b/testing/mozharness/mozharness/base/vcs/vcssync.py
new file mode 100644
index 000000000..ffecb16b7
--- /dev/null
+++ b/testing/mozharness/mozharness/base/vcs/vcssync.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Generic VCS support.
+"""
+
+import os
+import smtplib
+import sys
+import time
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.dirname(sys.path[0]))))
+
+from mozharness.base.log import ERROR, INFO
+from mozharness.base.vcs.vcsbase import VCSScript
+
+
+# VCSSyncScript {{{1
+class VCSSyncScript(VCSScript):
+ start_time = time.time()
+
+ def __init__(self, **kwargs):
+ super(VCSSyncScript, self).__init__(**kwargs)
+
+ def notify(self, message=None, fatal=False):
+ """ Email people in the notify_config (depending on status and failure_only)
+ """
+ c = self.config
+ dirs = self.query_abs_dirs()
+ job_name = c.get('job_name', c.get('conversion_dir', os.getcwd()))
+ end_time = time.time()
+ seconds = int(end_time - self.start_time)
+ self.info("Job took %d seconds." % seconds)
+ subject = "[vcs2vcs] Successful conversion for %s" % job_name
+ text = ''
+ error_contents = ''
+ max_log_sample_size = c.get('email_max_log_sample_size') # default defined in vcs_sync.py
+ error_log = os.path.join(dirs['abs_log_dir'], self.log_obj.log_files[ERROR])
+ info_log = os.path.join(dirs['abs_log_dir'], self.log_obj.log_files[INFO])
+ if os.path.exists(error_log) and os.path.getsize(error_log) > 0:
+ error_contents = self.get_output_from_command(
+ ["egrep", "-C5", "^[0-9:]+ +(ERROR|CRITICAL|FATAL) -", info_log],
+ silent=True,
+ )
+ if fatal:
+ subject = "[vcs2vcs] Failed conversion for %s" % job_name
+ text = ''
+ message = message or ''  # guard against a fatal notify() with no message
+ if len(message) > max_log_sample_size:
+     text += '*** Message below has been truncated: it was %s characters, and has been reduced to %s characters:\n\n' % (len(message), max_log_sample_size)
+ text += message[0:max_log_sample_size] + '\n\n'  # cap at max_log_sample_size (large emails fail to send)
+ if not self.successful_repos:
+ subject = "[vcs2vcs] Successful no-op conversion for %s" % job_name
+ if error_contents and not fatal:
+ subject += " with warnings"
+ if self.successful_repos:
+ if len(self.successful_repos) <= 5:
+ subject += ' (' + ','.join(self.successful_repos) + ')'
+ else:
+ text += "Successful repos: %s\n\n" % ', '.join(self.successful_repos)
+ subject += ' (%ds)' % seconds
+ if self.summary_list:
+ text += 'Summary is non-zero:\n\n'
+ for item in self.summary_list:
+ text += '%s - %s\n' % (item['level'], item['message'])
+ if not fatal and error_contents and not self.summary_list:
+ text += 'Summary is empty; the below errors have probably been auto-corrected.\n\n'
+ if error_contents:
+ if len(error_contents) > max_log_sample_size:
+ text += '\n*** Message below has been truncated: it was %s characters, and has been reduced to %s characters:\n' % (len(error_contents), max_log_sample_size)
+ text += '\n%s\n\n' % error_contents[0:max_log_sample_size]  # cap at max_log_sample_size (large emails fail to send)
+ if not text:
+ subject += " <EOM>"
+ for notify_config in c.get('notify_config', []):
+ if not fatal:
+ if notify_config.get('failure_only'):
+ self.info("Skipping notification for %s (failure_only)" % notify_config['to'])
+ continue
+ if not text and notify_config.get('skip_empty_messages'):
+ self.info("Skipping notification for %s (skip_empty_messages)" % notify_config['to'])
+ continue
+ fromaddr = notify_config.get('from', c['default_notify_from'])
+ message = '\r\n'.join((
+ "From: %s" % fromaddr,
+ "To: %s" % notify_config['to'],
+ "CC: %s" % ','.join(notify_config.get('cc', [])),
+ "Subject: %s" % subject,
+ "",
+ text
+ ))
+ toaddrs = [notify_config['to']] + notify_config.get('cc', [])
+ # TODO allow for a different smtp server
+ # TODO deal with failures
+ server = smtplib.SMTP('localhost')
+ self.retry(
+ server.sendmail,
+ args=(fromaddr, toaddrs, message),
+ )
+ server.quit()
diff --git a/testing/mozharness/mozharness/lib/__init__.py b/testing/mozharness/mozharness/lib/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/lib/__init__.py
diff --git a/testing/mozharness/mozharness/lib/python/__init__.py b/testing/mozharness/mozharness/lib/python/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/lib/python/__init__.py
diff --git a/testing/mozharness/mozharness/lib/python/authentication.py b/testing/mozharness/mozharness/lib/python/authentication.py
new file mode 100644
index 000000000..2e5f83f37
--- /dev/null
+++ b/testing/mozharness/mozharness/lib/python/authentication.py
@@ -0,0 +1,53 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+"""module for http authentication operations"""
+import getpass
+import os
+
+CREDENTIALS_PATH = os.path.expanduser("~/.mozilla/credentials.cfg")
+DIRNAME = os.path.dirname(CREDENTIALS_PATH)
+LDAP_PASSWORD = None
+
+def get_credentials():
+ """ Returns http credentials.
+
+ The user's email address is stored on disk for reuse, while the
+ password is requested from the user on first invocation and kept only
+ in memory.
+ """
+ global LDAP_PASSWORD
+ if not os.path.exists(DIRNAME):
+ os.makedirs(DIRNAME)
+
+ if os.path.isfile(CREDENTIALS_PATH):
+ with open(CREDENTIALS_PATH, 'r') as file_handler:
+ content = file_handler.read().splitlines()
+
+ https_username = content[0].strip()
+
+ if len(content) > 1:
+ # We want to remove files which contain the password
+ os.remove(CREDENTIALS_PATH)
+ else:
+ https_username = \
+ raw_input("Please enter your full LDAP email address: ")
+
+ with open(CREDENTIALS_PATH, "w+") as file_handler:
+ file_handler.write("%s\n" % https_username)
+
+ os.chmod(CREDENTIALS_PATH, 0600)
+
+ if not LDAP_PASSWORD:
+ print "Please enter your LDAP password (we won't store it):"
+ LDAP_PASSWORD = getpass.getpass()
+
+ return https_username, LDAP_PASSWORD
+
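+# Hedged usage sketch:
+#     username, password = get_credentials()
+#     # e.g. feed these into an HTTP basic-auth handler
+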
+def get_credentials_path():
+ if os.path.isfile(CREDENTIALS_PATH):
+ get_credentials()
+
+ return CREDENTIALS_PATH
diff --git a/testing/mozharness/mozharness/mozilla/__init__.py b/testing/mozharness/mozharness/mozilla/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/__init__.py
diff --git a/testing/mozharness/mozharness/mozilla/aws.py b/testing/mozharness/mozharness/mozilla/aws.py
new file mode 100644
index 000000000..264c39037
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/aws.py
@@ -0,0 +1,11 @@
+import os
+
+
+def pop_aws_auth_from_env():
+ """
+ retrieves aws creds and deletes them from os.environ if present.
+ """
+ aws_key_id = os.environ.pop("AWS_ACCESS_KEY_ID", None)
+ aws_secret_key = os.environ.pop("AWS_SECRET_ACCESS_KEY", None)
+
+ return aws_key_id, aws_secret_key
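+
+# Hedged usage sketch: call before spawning tools that must not inherit
+# AWS credentials from the environment.
+#     key_id, secret_key = pop_aws_auth_from_env()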
diff --git a/testing/mozharness/mozharness/mozilla/blob_upload.py b/testing/mozharness/mozharness/mozilla/blob_upload.py
new file mode 100644
index 000000000..1607ddf99
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/blob_upload.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+
+from mozharness.base.python import VirtualenvMixin
+from mozharness.base.script import PostScriptRun
+
+blobupload_config_options = [
+ [["--blob-upload-branch"],
+ {"dest": "blob_upload_branch",
+ "help": "Branch for blob server's metadata",
+ }],
+ [["--blob-upload-server"],
+  {"dest": "blob_upload_servers",
+   "action": "extend",
+   "help": "Blob server's location",
+  }]
+ ]
+
+
+class BlobUploadMixin(VirtualenvMixin):
+ """Provides mechanism to automatically upload files written in
+ MOZ_UPLOAD_DIR to the blobber upload server at the end of the
+ running script.
+
+ This is dependent on ScriptMixin and BuildbotMixin.
+ The testing script inheriting this class must pass the
+ --blob-upload-branch and --blob-upload-server command line options.
+
+ """
+ def __init__(self, *args, **kwargs):
+ requirements = [
+ 'blobuploader==1.2.4',
+ ]
+ super(BlobUploadMixin, self).__init__(*args, **kwargs)
+ for req in requirements:
+ self.register_virtualenv_module(req, method='pip')
+
+ def upload_blobber_files(self):
+ self.debug("Check branch and server cmdline options.")
+ if self.config.get('blob_upload_branch') and \
+ (self.config.get('blob_upload_servers') or
+ self.config.get('default_blob_upload_servers')) and \
+ self.config.get('blob_uploader_auth_file'):
+
+ self.info("Blob upload gear active.")
+ upload = [self.query_python_path(), self.query_python_path("blobberc.py")]
+
+ dirs = self.query_abs_dirs()
+ self.debug("Get the directory from which to upload the files.")
+ if dirs.get('abs_blob_upload_dir'):
+ blob_dir = dirs['abs_blob_upload_dir']
+ else:
+ self.warning("Couldn't find the blob upload folder's path!")
+ return
+
+ if not os.path.isdir(blob_dir):
+ self.warning("Blob upload directory does not exist!")
+ return
+
+ if not os.listdir(blob_dir):
+ self.info("There are no files to upload in the directory. "
+ "Skipping the blob upload mechanism ...")
+ return
+
+ self.info("Preparing to upload files from %s." % blob_dir)
+ auth_file = self.config.get('blob_uploader_auth_file')
+ if not os.path.isfile(auth_file):
+ self.warning("Could not find the credentials file!")
+ return
+ blob_branch = self.config.get('blob_upload_branch')
+ blob_servers_list = self.config.get('blob_upload_servers',
+ self.config.get('default_blob_upload_servers'))
+
+ servers = []
+ for server in blob_servers_list:
+ servers.extend(['-u', server])
+ auth = ['-a', auth_file]
+ branch = ['-b', blob_branch]
+ dir_to_upload = ['-d', blob_dir]
+ # We want blobberc to tell us if a summary file was uploaded through this manifest file
+ manifest_path = os.path.join(dirs['abs_work_dir'], 'uploaded_files.json')
+ record_uploaded_files = ['--output-manifest', manifest_path]
+ self.info("Files from %s are to be uploaded with <%s> branch at "
+ "the following location(s): %s" % (blob_dir, blob_branch,
+ ", ".join(["%s" % s for s in blob_servers_list])))
+
+ # call blob client to upload files to server
+ self.run_command(upload + servers + auth + branch + dir_to_upload + record_uploaded_files)
+
+ uploaded_files = '{}'
+ if os.path.isfile(manifest_path):
+ with open(manifest_path, 'r') as f:
+ uploaded_files = f.read()
+ self.rmtree(manifest_path)
+
+ self.set_buildbot_property(prop_name='blobber_files',
+ prop_value=uploaded_files, write_to_file=True)
+ else:
+ self.warning("Blob upload gear skipped. Missing cmdline options.")
+
+ @PostScriptRun
+ def _upload_blobber_files(self):
+ self.upload_blobber_files()
diff --git a/testing/mozharness/mozharness/mozilla/bouncer/__init__.py b/testing/mozharness/mozharness/mozilla/bouncer/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/bouncer/__init__.py
diff --git a/testing/mozharness/mozharness/mozilla/bouncer/submitter.py b/testing/mozharness/mozharness/mozilla/bouncer/submitter.py
new file mode 100644
index 000000000..43983dca8
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/bouncer/submitter.py
@@ -0,0 +1,114 @@
+import base64
+import httplib
+import socket
+import sys
+import traceback
+import urllib
+import urllib2
+from xml.dom.minidom import parseString
+
+from mozharness.base.log import FATAL
+
+
+class BouncerSubmitterMixin(object):
+ def query_credentials(self):
+ if self.credentials:
+ return self.credentials
+ global_dict = {}
+ local_dict = {}
+ execfile(self.config["credentials_file"], global_dict, local_dict)
+ self.credentials = (local_dict["tuxedoUsername"],
+ local_dict["tuxedoPassword"])
+ return self.credentials
+
+ def api_call(self, route, data, error_level=FATAL, retry_config=None):
+ retry_args = dict(
+ failure_status=None,
+ retry_exceptions=(urllib2.HTTPError, urllib2.URLError,
+ httplib.BadStatusLine,
+ socket.timeout, socket.error),
+ error_message="call to %s failed" % (route),
+ error_level=error_level,
+ )
+
+ if retry_config:
+ retry_args.update(retry_config)
+
+ return self.retry(
+ self._api_call,
+ args=(route, data),
+ **retry_args
+ )
+
+ def _api_call(self, route, data):
+ api_prefix = self.config["bouncer-api-prefix"]
+ api_url = "%s/%s" % (api_prefix, route)
+ request = urllib2.Request(api_url)
+ if data:
+ post_data = urllib.urlencode(data, doseq=True)
+ request.add_data(post_data)
+ self.info("POST data: %s" % post_data)
+ credentials = self.query_credentials()
+ if credentials:
+ auth = base64.encodestring('%s:%s' % credentials)
+ request.add_header("Authorization", "Basic %s" % auth.strip())
+ try:
+ self.info("Submitting to %s" % api_url)
+ res = urllib2.urlopen(request, timeout=60).read()
+ self.info("Server response")
+ self.info(res)
+ return res
+ except urllib2.HTTPError as e:
+ self.warning("Cannot access %s" % api_url)
+ traceback.print_exc(file=sys.stdout)
+ self.warning("Returned page source:")
+ self.warning(e.read())
+ raise
+ except urllib2.URLError:
+ traceback.print_exc(file=sys.stdout)
+ self.warning("Cannot access %s" % api_url)
+ raise
+ except socket.timeout as e:
+ self.warning("Timed out accessing %s: %s" % (api_url, e))
+ raise
+ except socket.error as e:
+ self.warning("Socket error when accessing %s: %s" % (api_url, e))
+ raise
+ except httplib.BadStatusLine as e:
+ self.warning('BadStatusLine accessing %s: %s' % (api_url, e))
+ raise
+
+ def product_exists(self, product_name):
+ self.info("Checking if %s already exists" % product_name)
+ res = self.api_call("product_show?product=%s" %
+ urllib.quote(product_name), data=None)
+ try:
+ xml = parseString(res)
+ # API returns <products/> if the product doesn't exist
+ products_found = len(xml.getElementsByTagName("product"))
+ self.info("Products found: %s" % products_found)
+ return bool(products_found)
+ except Exception as e:
+ self.warning("Error parsing XML: %s" % e)
+ self.warning("Assuming %s does not exist" % product_name)
+ # ignore XML parsing errors
+ return False
+
+ def api_add_product(self, product_name, add_locales, ssl_only=False):
+ data = {
+ "product": product_name,
+ }
+ if self.locales and add_locales:
+ data["languages"] = self.locales
+ if ssl_only:
+ # Send "true" as a string
+ data["ssl_only"] = "true"
+ self.api_call("product_add/", data)
+
+ def api_add_location(self, product_name, bouncer_platform, path):
+ data = {
+ "product": product_name,
+ "os": bouncer_platform,
+ "path": path,
+ }
+ self.api_call("location_add/", data)
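+
+ # Hedged sketch of the expected submission order (product name and
+ # path illustrative):
+ #     if not self.product_exists('Firefox-99.0'):
+ #         self.api_add_product('Firefox-99.0', add_locales=True)
+ #     self.api_add_location('Firefox-99.0', 'win',
+ #                           '/firefox/releases/99.0/win32/:lang/installer.exe')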
diff --git a/testing/mozharness/mozharness/mozilla/buildbot.py b/testing/mozharness/mozharness/mozilla/buildbot.py
new file mode 100755
index 000000000..e17343633
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/buildbot.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Code to tie into buildbot.
+Ideally this will go away if and when we retire buildbot.
+"""
+
+import copy
+import os
+import re
+import sys
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.config import parse_config_file
+from mozharness.base.log import INFO, WARNING, ERROR
+
+# BuildbotMixin {{{1
+
+TBPL_SUCCESS = 'SUCCESS'
+TBPL_WARNING = 'WARNING'
+TBPL_FAILURE = 'FAILURE'
+TBPL_EXCEPTION = 'EXCEPTION'
+TBPL_RETRY = 'RETRY'
+TBPL_STATUS_DICT = {
+ TBPL_SUCCESS: INFO,
+ TBPL_WARNING: WARNING,
+ TBPL_FAILURE: ERROR,
+ TBPL_EXCEPTION: ERROR,
+ TBPL_RETRY: WARNING,
+}
+EXIT_STATUS_DICT = {
+ TBPL_SUCCESS: 0,
+ TBPL_WARNING: 1,
+ TBPL_FAILURE: 2,
+ TBPL_EXCEPTION: 3,
+ TBPL_RETRY: 4,
+}
+TBPL_WORST_LEVEL_TUPLE = (TBPL_RETRY, TBPL_EXCEPTION, TBPL_FAILURE,
+ TBPL_WARNING, TBPL_SUCCESS)
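+# worst_level() treats entries earlier in this tuple as worse, so e.g.
+# worst_level(TBPL_WARNING, TBPL_FAILURE, TBPL_WORST_LEVEL_TUPLE) keeps
+# TBPL_FAILURE, which buildbot_status() below then maps to return code 2
+# via EXIT_STATUS_DICT.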
+
+
+class BuildbotMixin(object):
+ buildbot_config = None
+ buildbot_properties = {}
+ worst_buildbot_status = TBPL_SUCCESS
+
+ def read_buildbot_config(self):
+ c = self.config
+ if not c.get("buildbot_json_path"):
+ # If we need to fail out, add postflight_read_buildbot_config()
+ self.info("buildbot_json_path is not set. Skipping...")
+ else:
+ # TODO try/except?
+ self.buildbot_config = parse_config_file(c['buildbot_json_path'])
+ buildbot_properties = copy.deepcopy(self.buildbot_config.get('properties', {}))
+ if 'commit_titles' in buildbot_properties:
+ # Remove the commit messages since they can cause false positives with
+                # Treeherder log parsers, e.g. "Bug X - Fix TEST-UNEXPECTED-FAIL ...".
+ del buildbot_properties['commit_titles']
+ self.info("Using buildbot properties:")
+ self.info(json.dumps(buildbot_properties, indent=4))
+
+ def tryserver_email(self):
+ pass
+
+ def buildbot_status(self, tbpl_status, level=None, set_return_code=True):
+ if tbpl_status not in TBPL_STATUS_DICT:
+ self.error("buildbot_status() doesn't grok the status %s!" % tbpl_status)
+ else:
+ # Set failure if our log > buildbot_max_log_size (bug 876159)
+ if self.config.get("buildbot_max_log_size") and self.log_obj:
+ # Find the path to the default log
+ dirs = self.query_abs_dirs()
+ log_file = os.path.join(
+ dirs['abs_log_dir'],
+ self.log_obj.log_files[self.log_obj.log_level]
+ )
+ if os.path.exists(log_file):
+ file_size = os.path.getsize(log_file)
+ if file_size > self.config['buildbot_max_log_size']:
+ self.error("Log file size %d is greater than max allowed %d! Setting TBPL_FAILURE (was %s)..." % (file_size, self.config['buildbot_max_log_size'], tbpl_status))
+ tbpl_status = TBPL_FAILURE
+ if not level:
+ level = TBPL_STATUS_DICT[tbpl_status]
+ self.worst_buildbot_status = self.worst_level(tbpl_status, self.worst_buildbot_status, TBPL_WORST_LEVEL_TUPLE)
+ if self.worst_buildbot_status != tbpl_status:
+ self.info("Current worst status %s is worse; keeping it." % self.worst_buildbot_status)
+ self.add_summary("# TBPL %s #" % self.worst_buildbot_status, level=level)
+ if set_return_code:
+ self.return_code = EXIT_STATUS_DICT[self.worst_buildbot_status]
+
+ def set_buildbot_property(self, prop_name, prop_value, write_to_file=False):
+ self.info("Setting buildbot property %s to %s" % (prop_name, prop_value))
+ self.buildbot_properties[prop_name] = prop_value
+ if write_to_file:
+ return self.dump_buildbot_properties(prop_list=[prop_name], file_name=prop_name)
+ return self.buildbot_properties[prop_name]
+
+ def query_buildbot_property(self, prop_name):
+ return self.buildbot_properties.get(prop_name)
+
+ def query_is_nightly(self):
+ """returns whether or not the script should run as a nightly build.
+
+        First we check for 'nightly_build' in self.config; if that is
+        not True, we also allow buildbot_config to determine it
+        for us. Failing all of that, we default to False.
+        Note: the dependency on buildbot_config is being deprecated;
+        putting everything in self.config is the preference.
+ """
+ if self.config.get('nightly_build'):
+ return True
+ elif self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties'].get('nightly_build', False)
+ else:
+ return False
+
+ def dump_buildbot_properties(self, prop_list=None, file_name="properties", error_level=ERROR):
+ c = self.config
+ if not os.path.isabs(file_name):
+ file_name = os.path.join(c['base_work_dir'], "properties", file_name)
+ dir_name = os.path.dirname(file_name)
+ if not os.path.isdir(dir_name):
+ self.mkdir_p(dir_name)
+ if not prop_list:
+ prop_list = self.buildbot_properties.keys()
+ self.info("Writing buildbot properties to %s" % file_name)
+ else:
+ if not isinstance(prop_list, (list, tuple)):
+ self.log("dump_buildbot_properties: Can't dump non-list prop_list %s!" % str(prop_list), level=error_level)
+ return
+ self.info("Writing buildbot properties %s to %s" % (str(prop_list), file_name))
+ contents = ""
+ for prop in prop_list:
+ contents += "%s:%s\n" % (prop, self.buildbot_properties.get(prop, "None"))
+ return self.write_to_file(file_name, contents)
+
+ def invoke_sendchange(self, downloadables=None, branch=None,
+ username="sendchange-unittest", sendchange_props=None):
+ """ Generic sendchange, currently b2g- and unittest-specific.
+ """
+ c = self.config
+ buildbot = self.query_exe("buildbot", return_type="list")
+ if branch is None:
+ if c.get("debug_build"):
+ platform = re.sub('[_-]debug', '', self.buildbot_config["properties"]["platform"])
+ branch = '%s-%s-debug-unittest' % (self.buildbot_config["properties"]["branch"], platform)
+ else:
+ branch = '%s-%s-opt-unittest' % (self.buildbot_config["properties"]["branch"], self.buildbot_config["properties"]["platform"])
+ sendchange = [
+ 'sendchange',
+ '--master', c.get("sendchange_masters")[0],
+ '--username', username,
+ '--branch', branch,
+ ]
+ if self.buildbot_config['sourcestamp'].get("revision"):
+ sendchange += ['-r', self.buildbot_config['sourcestamp']["revision"]]
+ if len(self.buildbot_config['sourcestamp']['changes']) > 0:
+ if self.buildbot_config['sourcestamp']['changes'][0].get('who'):
+ sendchange += ['--username', self.buildbot_config['sourcestamp']['changes'][0]['who']]
+ if self.buildbot_config['sourcestamp']['changes'][0].get('comments'):
+ sendchange += ['--comments', self.buildbot_config['sourcestamp']['changes'][0]['comments'].encode('ascii', 'ignore')]
+ if sendchange_props:
+ for key, value in sendchange_props.iteritems():
+ sendchange.extend(['--property', '%s:%s' % (key, value)])
+ else:
+ if self.buildbot_config["properties"].get("builduid"):
+ sendchange += ['--property', "builduid:%s" % self.buildbot_config["properties"]["builduid"]]
+ sendchange += [
+ '--property', "buildid:%s" % self.query_buildid(),
+ '--property', 'pgo_build:False',
+ ]
+
+ for d in downloadables:
+ sendchange += [d]
+
+ retcode = self.run_command(buildbot + sendchange)
+ if retcode != 0:
+ self.info("The sendchange failed but we don't want to turn the build orange: %s" % retcode)
+
+ def query_build_name(self):
+ build_name = self.config.get('platform')
+ if not build_name:
+ self.fatal('Must specify "platform" in the mozharness config for indexing')
+
+ return build_name
+
+ def query_build_type(self):
+ if self.config.get('build_type'):
+ build_type = self.config['build_type']
+ elif self.config.get('pgo_build'):
+ build_type = 'pgo'
+ elif self.config.get('debug_build', False):
+ build_type = 'debug'
+ else:
+ build_type = 'opt'
+ return build_type
+
+    def buildid_to_dict(self, buildid):
+        """Returns a dict with the year, month, day, hour, minute, and second
+        as keys, as parsed from the buildid"""
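+        # For example (illustrative input only):
+        #   buildid_to_dict('20170315103045') ->
+        #   {'year': '2017', 'month': '03', 'day': '15',
+        #    'hour': '10', 'minute': '30', 'second': '45'}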
+ buildidDict = {}
+ try:
+ # strptime is no good here because it strips leading zeros
+ buildidDict['year'] = buildid[0:4]
+ buildidDict['month'] = buildid[4:6]
+ buildidDict['day'] = buildid[6:8]
+ buildidDict['hour'] = buildid[8:10]
+ buildidDict['minute'] = buildid[10:12]
+ buildidDict['second'] = buildid[12:14]
+        except Exception:
+ self.fatal('Could not parse buildid into YYYYMMDDHHMMSS: %s' % buildid)
+ return buildidDict
+
+ def query_who(self):
+ """ looks for who triggered the build with a change.
+
+        This is used for things like try builds, where the upload dir is
+        associated with whoever pushed to try. First it will look in self.config
+        and, failing that, will poll buildbot_config.
+        If nothing is found, it will default to returning "nobody@example.com".
+ """
+ if self.config.get('who'):
+ return self.config['who']
+ self.read_buildbot_config()
+ try:
+ return self.buildbot_config['sourcestamp']['changes'][0]['who']
+ except (KeyError, IndexError):
+ # KeyError: "sourcestamp" or "changes" or "who" not in buildbot_config
+ # IndexError: buildbot_config['sourcestamp']['changes'] is empty
+ pass
+ try:
+ return str(self.buildbot_config['properties']['who'])
+ except KeyError:
+ pass
+ return "nobody@example.com"
diff --git a/testing/mozharness/mozharness/mozilla/building/__init__.py b/testing/mozharness/mozharness/mozilla/building/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/building/__init__.py
diff --git a/testing/mozharness/mozharness/mozilla/building/buildbase.py b/testing/mozharness/mozharness/mozilla/building/buildbase.py
new file mode 100755
index 000000000..8a2e172cb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/building/buildbase.py
@@ -0,0 +1,2155 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" buildbase.py.
+
+provides a base class for fx desktop builds
+author: Jordan Lund
+
+"""
+import json
+
+import os
+import pprint
+import subprocess
+import time
+import uuid
+import copy
+import glob
+import shlex
+from itertools import chain
+
+# import the power of mozharness ;)
+import sys
+from datetime import datetime
+import re
+from mozharness.base.config import BaseConfig, parse_config_file
+from mozharness.base.log import ERROR, OutputParser, FATAL
+from mozharness.base.script import PostScriptRun
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import (
+ BuildbotMixin,
+ EXIT_STATUS_DICT,
+ TBPL_STATUS_DICT,
+ TBPL_EXCEPTION,
+ TBPL_FAILURE,
+ TBPL_RETRY,
+ TBPL_WARNING,
+ TBPL_SUCCESS,
+ TBPL_WORST_LEVEL_TUPLE,
+)
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.secrets import SecretsMixin
+from mozharness.mozilla.signing import SigningMixin
+from mozharness.mozilla.mock import ERROR_MSGS as MOCK_ERROR_MSGS
+from mozharness.mozilla.testing.errors import TinderBoxPrintRe
+from mozharness.mozilla.testing.unittest import tbox_print_summary
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.mozilla.taskcluster_helper import Taskcluster
+from mozharness.base.python import (
+ PerfherderResourceOptionsMixin,
+ VirtualenvMixin,
+)
+
+AUTOMATION_EXIT_CODES = sorted(EXIT_STATUS_DICT.values())
+
+MISSING_CFG_KEY_MSG = "The key '%s' could not be determined. \
+Please add this to your config."
+
+ERROR_MSGS = {
+ 'undetermined_repo_path': 'The repo could not be determined. \
+Please make sure that either "repo" is in your config or, if \
+you are running this in buildbot, "repo_path" is in your buildbot_config.',
+ 'comments_undetermined': '"comments" could not be determined. This may be \
+because it was a forced build.',
+    'tooltool_manifest_undetermined': '"tooltool_manifest_src" not set. \
+Skipping run_tooltool...',
+}
+ERROR_MSGS.update(MOCK_ERROR_MSGS)
+
+
+### Output Parsers
+
+TBPL_UPLOAD_ERRORS = [
+ {
+ 'regex': re.compile("Connection timed out"),
+ 'level': TBPL_RETRY,
+ },
+ {
+ 'regex': re.compile("Connection reset by peer"),
+ 'level': TBPL_RETRY,
+ },
+ {
+ 'regex': re.compile("Connection refused"),
+ 'level': TBPL_RETRY,
+ }
+]
+
+
+class MakeUploadOutputParser(OutputParser):
+ tbpl_error_list = TBPL_UPLOAD_ERRORS
+    # let's create a switch-case using a dict of (property, condition)
+    # pairs rather than a long if/else with duplicate code
+ property_conditions = [
+ # key: property name, value: condition
+ ('symbolsUrl', "m.endswith('crashreporter-symbols.zip') or "
+ "m.endswith('crashreporter-symbols-full.zip')"),
+ ('testsUrl', "m.endswith(('tests.tar.bz2', 'tests.zip'))"),
+ ('robocopApkUrl', "m.endswith('apk') and 'robocop' in m"),
+ ('jsshellUrl', "'jsshell-' in m and m.endswith('.zip')"),
+ ('partialMarUrl', "m.endswith('.mar') and '.partial.' in m"),
+ ('completeMarUrl', "m.endswith('.mar')"),
+ ('codeCoverageUrl', "m.endswith('code-coverage-gcno.zip')"),
+ ]
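+    # e.g. an upload line ending in 'firefox-42.0.en-US.linux-x86_64.tests.zip'
+    # (name illustrative) satisfies the 'testsUrl' condition. Order matters:
+    # the generic completeMarUrl check must come after the more specific
+    # partialMarUrl one, since both match '.mar'.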
+
+ def __init__(self, use_package_as_marfile=False, package_filename=None, **kwargs):
+ super(MakeUploadOutputParser, self).__init__(**kwargs)
+ self.matches = {}
+ self.tbpl_status = TBPL_SUCCESS
+ self.use_package_as_marfile = use_package_as_marfile
+ self.package_filename = package_filename
+
+ def parse_single_line(self, line):
+ prop_assigned = False
+ pat = r'''^(https?://.*?\.(?:tar\.bz2|dmg|zip|apk|rpm|mar|tar\.gz))$'''
+ m = re.compile(pat).match(line)
+ if m:
+ m = m.group(1)
+ for prop, condition in self.property_conditions:
+ if eval(condition):
+ self.matches[prop] = m
+ prop_assigned = True
+ break
+ if not prop_assigned:
+ # if we found a match but haven't identified the prop then this
+ # is the packageURL. Alternatively, if we already know the
+ # package filename, then use that explicitly so we don't pick up
+ # just any random file and assume it's the package.
+ if not self.package_filename or m.endswith(self.package_filename):
+ self.matches['packageUrl'] = m
+
+ # For android builds, the package is also used as the mar file.
+ # Grab the first one, since that is the one in the
+ # nightly/YYYY/MM directory
+ if self.use_package_as_marfile:
+ if 'tinderbox-builds' in m or 'nightly/latest-' in m:
+ self.info("Skipping wrong packageUrl: %s" % m)
+ else:
+ if 'completeMarUrl' in self.matches:
+ self.fatal("Found multiple package URLs. Please update buildbase.py")
+ self.info("Using package as mar file: %s" % m)
+ self.matches['completeMarUrl'] = m
+ u, self.package_filename = os.path.split(m)
+
+ if self.use_package_as_marfile and self.package_filename:
+ # The checksum file is also dumped during 'make upload'. Look
+ # through here to get the hash and filesize of the android package
+ # for balrog submission.
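+            # a matching checksum line looks roughly like (hash and size
+            # illustrative):
+            #   <128-hex-char-sha512> sha512 31457280 fennec-42.0.en-US.android-arm.apk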
+ pat = r'''^([^ ]*) sha512 ([0-9]*) %s$''' % self.package_filename
+ m = re.compile(pat).match(line)
+ if m:
+ self.matches['completeMarHash'] = m.group(1)
+ self.matches['completeMarSize'] = m.group(2)
+ self.info("Using package as mar file and found package hash=%s size=%s" % (m.group(1), m.group(2)))
+
+ # now let's check for retry errors which will give log levels:
+ # tbpl status as RETRY and mozharness status as WARNING
+ for error_check in self.tbpl_error_list:
+ if error_check['regex'].search(line):
+ self.num_warnings += 1
+ self.warning(line)
+ self.tbpl_status = self.worst_level(
+ error_check['level'], self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE
+ )
+ break
+ else:
+ self.info(line)
+
+
+class CheckTestCompleteParser(OutputParser):
+ tbpl_error_list = TBPL_UPLOAD_ERRORS
+
+ def __init__(self, **kwargs):
+ self.matches = {}
+ super(CheckTestCompleteParser, self).__init__(**kwargs)
+ self.pass_count = 0
+ self.fail_count = 0
+ self.leaked = False
+ self.harness_err_re = TinderBoxPrintRe['harness_error']['full_regex']
+ self.tbpl_status = TBPL_SUCCESS
+
+ def parse_single_line(self, line):
+ # Counts and flags.
+ # Regular expression for crash and leak detections.
+ if "TEST-PASS" in line:
+ self.pass_count += 1
+ return self.info(line)
+ if "TEST-UNEXPECTED-" in line:
+ # Set the error flags.
+ # Or set the failure count.
+ m = self.harness_err_re.match(line)
+ if m:
+ r = m.group(1)
+ if r == "missing output line for total leaks!":
+ self.leaked = None
+ else:
+ self.leaked = True
+ self.fail_count += 1
+ return self.warning(line)
+ self.info(line) # else
+
+ def evaluate_parser(self, return_code, success_codes=None):
+ success_codes = success_codes or [0]
+
+ if self.num_errors: # ran into a script error
+ self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ if self.fail_count > 0:
+ self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ # Account for the possibility that no test summary was output.
+ if self.pass_count == 0 and self.fail_count == 0:
+ self.error('No tests run or test summary not found')
+ self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ if return_code not in success_codes:
+ self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ # Print the summary.
+ summary = tbox_print_summary(self.pass_count,
+ self.fail_count,
+ self.leaked)
+ self.info("TinderboxPrint: check<br/>%s\n" % summary)
+
+ return self.tbpl_status
+
+
+class BuildingConfig(BaseConfig):
+ # TODO add nosetests for this class
+ def get_cfgs_from_files(self, all_config_files, options):
+ """
+ Determine the configuration from the normal options and from
+ `--branch`, `--build-pool`, and `--custom-build-variant-cfg`. If the
+ files for any of the latter options are also given with `--config-file`
+ or `--opt-config-file`, they are only parsed once.
+
+ The build pool has highest precedence, followed by branch, build
+ variant, and any normally-specified configuration files.
+ """
+ # override from BaseConfig
+
+ # this is what we will return. It will represent each config
+ # file name and its associated dict
+ # eg ('builds/branch_specifics.py', {'foo': 'bar'})
+ all_config_dicts = []
+ # important config files
+ variant_cfg_file = branch_cfg_file = pool_cfg_file = ''
+
+        # we want the order in which the options were given not to
+        # matter, ie: you can supply --branch before --build-pool
+        # or vice versa and the hierarchy will not be different
+
+ #### The order from highest precedence to lowest is:
+ ## There can only be one of these...
+        # 1) build_pool: this can be the staging, pre-prod, or prod cfg
+        # 2) branch: eg: mozilla-central, cedar, cypress, etc
+        # 3) build_variant: these could be a known variant like asan or
+        #                   debug, or a custom config
+ ##
+ ## There can be many of these
+ # 4) all other configs: these are any configs that are passed with
+        #                       --cfg and --opt-cfg. Their order is kept as
+        #                       they were passed on the cmd line. This
+        #                       behaviour maintains what happens by default
+        #                       in mozharness
+ ##
+ ####
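+        # As an illustration, a hypothetical invocation with
+        #   --config builds/releng_base_linux_64_builds.py
+        #   --custom-build-variant-cfg asan
+        #   --branch mozilla-central --build-pool production
+        # stacks the dicts base -> variant -> branch -> pool, so a key set
+        # in build_pool_specifics.py wins over the same key anywhere else.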
+
+ # so, let's first assign the configs that hold a known position of
+ # importance (1 through 3)
+ for i, cf in enumerate(all_config_files):
+ if options.build_pool:
+ if cf == BuildOptionParser.build_pool_cfg_file:
+ pool_cfg_file = all_config_files[i]
+
+ if cf == BuildOptionParser.branch_cfg_file:
+ branch_cfg_file = all_config_files[i]
+
+ if cf == options.build_variant:
+ variant_cfg_file = all_config_files[i]
+
+        # now remove these from the list if there were any.
+ # we couldn't pop() these in the above loop as mutating a list while
+ # iterating through it causes spurious results :)
+ for cf in [pool_cfg_file, branch_cfg_file, variant_cfg_file]:
+ if cf:
+ all_config_files.remove(cf)
+
+ # now let's update config with the remaining config files.
+ # this functionality is the same as the base class
+ all_config_dicts.extend(
+ super(BuildingConfig, self).get_cfgs_from_files(all_config_files,
+ options)
+ )
+
+ # stack variant, branch, and pool cfg files on top of that,
+ # if they are present, in that order
+ if variant_cfg_file:
+ # take the whole config
+ all_config_dicts.append(
+ (variant_cfg_file, parse_config_file(variant_cfg_file))
+ )
+ if branch_cfg_file:
+ # take only the specific branch, if present
+ branch_configs = parse_config_file(branch_cfg_file)
+ if branch_configs.get(options.branch or ""):
+ all_config_dicts.append(
+ (branch_cfg_file, branch_configs[options.branch])
+ )
+ if pool_cfg_file:
+ # take only the specific pool. If we are here, the pool
+ # must be present
+ build_pool_configs = parse_config_file(pool_cfg_file)
+ all_config_dicts.append(
+ (pool_cfg_file, build_pool_configs[options.build_pool])
+ )
+ return all_config_dicts
+
+
+# noinspection PyUnusedLocal
+class BuildOptionParser(object):
+ # TODO add nosetests for this class
+ platform = None
+ bits = None
+ config_file_search_path = [
+ '.', os.path.join(sys.path[0], '..', 'configs'),
+ os.path.join(sys.path[0], '..', '..', 'configs')
+ ]
+
+ # add to this list and you can automagically do things like
+ # --custom-build-variant-cfg asan
+ # and the script will pull up the appropriate path for the config
+ # against the current platform and bits.
+ # *It will warn and fail if there is not a config for the current
+ # platform/bits
+ build_variants = {
+ 'add-on-devel': 'builds/releng_sub_%s_configs/%s_add-on-devel.py',
+ 'asan': 'builds/releng_sub_%s_configs/%s_asan.py',
+ 'asan-tc': 'builds/releng_sub_%s_configs/%s_asan_tc.py',
+ 'tsan': 'builds/releng_sub_%s_configs/%s_tsan.py',
+ 'cross-debug': 'builds/releng_sub_%s_configs/%s_cross_debug.py',
+ 'cross-opt': 'builds/releng_sub_%s_configs/%s_cross_opt.py',
+ 'cross-universal': 'builds/releng_sub_%s_configs/%s_cross_universal.py',
+ 'debug': 'builds/releng_sub_%s_configs/%s_debug.py',
+ 'asan-and-debug': 'builds/releng_sub_%s_configs/%s_asan_and_debug.py',
+ 'asan-tc-and-debug': 'builds/releng_sub_%s_configs/%s_asan_tc_and_debug.py',
+ 'stat-and-debug': 'builds/releng_sub_%s_configs/%s_stat_and_debug.py',
+ 'code-coverage': 'builds/releng_sub_%s_configs/%s_code_coverage.py',
+ 'source': 'builds/releng_sub_%s_configs/%s_source.py',
+ 'api-15-gradle-dependencies': 'builds/releng_sub_%s_configs/%s_api_15_gradle_dependencies.py',
+ 'api-15': 'builds/releng_sub_%s_configs/%s_api_15.py',
+ 'api-15-debug': 'builds/releng_sub_%s_configs/%s_api_15_debug.py',
+ 'api-15-gradle': 'builds/releng_sub_%s_configs/%s_api_15_gradle.py',
+ 'x86': 'builds/releng_sub_%s_configs/%s_x86.py',
+ 'api-15-partner-sample1': 'builds/releng_sub_%s_configs/%s_api_15_partner_sample1.py',
+ 'android-test': 'builds/releng_sub_%s_configs/%s_test.py',
+ 'android-checkstyle': 'builds/releng_sub_%s_configs/%s_checkstyle.py',
+ 'android-lint': 'builds/releng_sub_%s_configs/%s_lint.py',
+ 'valgrind' : 'builds/releng_sub_%s_configs/%s_valgrind.py',
+ 'artifact': 'builds/releng_sub_%s_configs/%s_artifact.py',
+ 'debug-artifact': 'builds/releng_sub_%s_configs/%s_debug_artifact.py',
+ }
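+    # e.g. with platform 'linux' and bits '64', the shortname 'asan'
+    # expands to 'builds/releng_sub_linux_configs/64_asan.py'
+    # (resolved by find_variant_cfg_path below)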
+ build_pool_cfg_file = 'builds/build_pool_specifics.py'
+ branch_cfg_file = 'builds/branch_specifics.py'
+
+ @classmethod
+ def _query_pltfrm_and_bits(cls, target_option, options):
+ """ determine platform and bits
+
+        This can come either from a supplied --platform and --bits
+        or be parsed from given config file names.
+ """
+ error_msg = (
+ 'Whoops!\nYou are trying to pass a shortname for '
+ '%s. \nHowever, I need to know the %s to find the appropriate '
+ 'filename. You can tell me by passing:\n\t"%s" or a config '
+ 'filename via "--config" with %s in it. \nIn either case, these '
+ 'option arguments must come before --custom-build-variant.'
+ )
+ current_config_files = options.config_files or []
+ if not cls.bits:
+ # --bits has not been supplied
+ # lets parse given config file names for 32 or 64
+ for cfg_file_name in current_config_files:
+ if '32' in cfg_file_name:
+ cls.bits = '32'
+ break
+ if '64' in cfg_file_name:
+ cls.bits = '64'
+ break
+ else:
+ sys.exit(error_msg % (target_option, 'bits', '--bits',
+ '"32" or "64"'))
+
+ if not cls.platform:
+ # --platform has not been supplied
+ # lets parse given config file names for platform
+ for cfg_file_name in current_config_files:
+ if 'windows' in cfg_file_name:
+ cls.platform = 'windows'
+ break
+ if 'mac' in cfg_file_name:
+ cls.platform = 'mac'
+ break
+ if 'linux' in cfg_file_name:
+ cls.platform = 'linux'
+ break
+ if 'android' in cfg_file_name:
+ cls.platform = 'android'
+ break
+ else:
+ sys.exit(error_msg % (target_option, 'platform', '--platform',
+ '"linux", "windows", "mac", or "android"'))
+ return cls.bits, cls.platform
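+        # For instance, running with --config
+        # builds/releng_base_linux_64_builds.py and no explicit
+        # --bits/--platform lets the loops above infer bits='64' and
+        # platform='linux' from the file name alone.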
+
+ @classmethod
+ def find_variant_cfg_path(cls, opt, value, parser):
+ valid_variant_cfg_path = None
+ # first let's see if we were given a valid short-name
+ if cls.build_variants.get(value):
+ bits, pltfrm = cls._query_pltfrm_and_bits(opt, parser.values)
+ prospective_cfg_path = cls.build_variants[value] % (pltfrm, bits)
+ else:
+ # this is either an incomplete path or an invalid key in
+ # build_variants
+ prospective_cfg_path = value
+
+ if os.path.exists(prospective_cfg_path):
+ # now let's see if we were given a valid pathname
+ valid_variant_cfg_path = value
+ else:
+ # let's take our prospective_cfg_path and see if we can
+ # determine an existing file
+ for path in cls.config_file_search_path:
+ if os.path.exists(os.path.join(path, prospective_cfg_path)):
+ # success! we found a config file
+ valid_variant_cfg_path = os.path.join(path,
+ prospective_cfg_path)
+ break
+ return valid_variant_cfg_path, prospective_cfg_path
+
+ @classmethod
+ def set_build_variant(cls, option, opt, value, parser):
+ """ sets an extra config file.
+
+ This is done by either taking an existing filepath or by taking a valid
+ shortname coupled with known platform/bits.
+ """
+ valid_variant_cfg_path, prospective_cfg_path = cls.find_variant_cfg_path(
+ '--custom-build-variant-cfg', value, parser)
+
+ if not valid_variant_cfg_path:
+ # either the value was an indeterminable path or an invalid short
+ # name
+            sys.exit("Whoops!\n'--custom-build-variant-cfg' was passed but an "
+ "appropriate config file could not be determined. Tried "
+ "using: '%s' but it was either not:\n\t-- a valid "
+ "shortname: %s \n\t-- a valid path in %s \n\t-- a "
+ "valid variant for the given platform and bits." % (
+ prospective_cfg_path,
+ str(cls.build_variants.keys()),
+ str(cls.config_file_search_path)))
+ parser.values.config_files.append(valid_variant_cfg_path)
+        setattr(parser.values, option.dest, value)  # the build variant
+
+ @classmethod
+ def set_build_pool(cls, option, opt, value, parser):
+ # first let's add the build pool file where there may be pool
+ # specific keys/values. Then let's store the pool name
+ parser.values.config_files.append(cls.build_pool_cfg_file)
+ setattr(parser.values, option.dest, value) # the pool
+
+ @classmethod
+ def set_build_branch(cls, option, opt, value, parser):
+ # first let's add the branch_specific file where there may be branch
+ # specific keys/values. Then let's store the branch name we are using
+ parser.values.config_files.append(cls.branch_cfg_file)
+ setattr(parser.values, option.dest, value) # the branch name
+
+ @classmethod
+ def set_platform(cls, option, opt, value, parser):
+ cls.platform = value
+ setattr(parser.values, option.dest, value)
+
+ @classmethod
+ def set_bits(cls, option, opt, value, parser):
+ cls.bits = value
+ setattr(parser.values, option.dest, value)
+
+
+# this global depends on BuildOptionParser and therefore cannot go at the
+# top of the file
+BUILD_BASE_CONFIG_OPTIONS = [
+    [['--developer-run', '--skip-buildbot-actions'], {
+        "action": "store_false",
+        "dest": "is_automation",
+        "default": True,
+        "help": "If this is running outside of Mozilla's build "
+                "infrastructure, use this option. It ignores actions "
+                "that are not needed and adds config checks."}],
+    [['--platform'], {
+        "action": "callback",
+        "callback": BuildOptionParser.set_platform,
+        "type": "string",
+        "dest": "platform",
+        "help": "Sets the platform we are running this against;"
+                " valid values: 'windows', 'mac', 'linux', 'android'"}],
+    [['--bits'], {
+        "action": "callback",
+        "callback": BuildOptionParser.set_bits,
+        "type": "string",
+        "dest": "bits",
+        "help": "Sets which bits we are building this against;"
+                " valid values: '32', '64'"}],
+ [['--custom-build-variant-cfg'], {
+ "action": "callback",
+ "callback": BuildOptionParser.set_build_variant,
+ "type": "string",
+ "dest": "build_variant",
+ "help": "Sets the build type and will determine appropriate"
+ " additional config to use. Either pass a config path"
+ " or use a valid shortname from: "
+ "%s" % (BuildOptionParser.build_variants.keys(),)}],
+ [['--build-pool'], {
+ "action": "callback",
+ "callback": BuildOptionParser.set_build_pool,
+ "type": "string",
+ "dest": "build_pool",
+ "help": "This will update the config with specific pool"
+ " environment keys/values. The dicts for this are"
+ " in %s\nValid values: staging or"
+ " production" % ('builds/build_pool_specifics.py',)}],
+    [['--branch'], {
+        "action": "callback",
+        "callback": BuildOptionParser.set_build_branch,
+        "type": "string",
+        "dest": "branch",
+        "help": "This sets the branch we will be building this for."
+                " If this branch is in branch_specifics.py, update our"
+                " config with specific keys/values from that. See"
+                " %s for possibilities" % (
+                    BuildOptionParser.branch_cfg_file,
+                )}],
+ [['--scm-level'], {
+ "action": "store",
+ "type": "int",
+ "dest": "scm_level",
+ "default": 1,
+ "help": "This sets the SCM level for the branch being built."
+ " See https://www.mozilla.org/en-US/about/"
+ "governance/policies/commit/access-policy/"}],
+ [['--enable-pgo'], {
+ "action": "store_true",
+ "dest": "pgo_build",
+ "default": False,
+ "help": "Sets the build to run in PGO mode"}],
+ [['--enable-nightly'], {
+ "action": "store_true",
+ "dest": "nightly_build",
+ "default": False,
+ "help": "Sets the build to run in nightly mode"}],
+    [['--who'], {
+        "dest": "who",
+        "default": '',
+        "help": "stores who created the buildbot change."}],
+ [["--disable-mock"], {
+ "dest": "disable_mock",
+ "action": "store_true",
+ "help": "do not run under mock despite what gecko-config says",
+ }],
+
+]
+
+
+def generate_build_ID():
+ return time.strftime("%Y%m%d%H%M%S", time.localtime(time.time()))
+
+
+def generate_build_UID():
+ return uuid.uuid4().hex
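+# Illustrative outputs: generate_build_ID() -> '20170315103045'
+# (local-time YYYYMMDDHHMMSS), generate_build_UID() -> a 32-char hex
+# string such as '9f86d081884c7d659a2feaa0c55ad015'.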
+
+
+class BuildScript(BuildbotMixin, PurgeMixin, MockMixin, BalrogMixin,
+ SigningMixin, VirtualenvMixin, MercurialScript,
+ SecretsMixin, PerfherderResourceOptionsMixin):
+ def __init__(self, **kwargs):
+ # objdir is referenced in _query_abs_dirs() so let's make sure we
+ # have that attribute before calling BaseScript.__init__
+ self.objdir = None
+ super(BuildScript, self).__init__(**kwargs)
+        # epoch is only here to represent the start of the buildbot build
+        # that this mozharness script came from. Until I can grab bbot's
+        # status.build.gettime()[0] this will have to do as a rough estimate,
+        # although it is about 4s off from the time it would be if it was
+        # done through MBF.
+ # TODO find out if that time diff matters or if we just use it to
+ # separate each build
+ self.epoch_timestamp = int(time.mktime(datetime.now().timetuple()))
+ self.branch = self.config.get('branch')
+ self.stage_platform = self.config.get('stage_platform')
+ if not self.branch or not self.stage_platform:
+ if not self.branch:
+ self.error("'branch' not determined and is required")
+ if not self.stage_platform:
+ self.error("'stage_platform' not determined and is required")
+ self.fatal("Please add missing items to your config")
+ self.repo_path = None
+ self.buildid = None
+ self.builduid = None
+ self.query_buildid() # sets self.buildid
+ self.query_builduid() # sets self.builduid
+ self.generated_build_props = False
+ self.client_id = None
+ self.access_token = None
+
+ # Call this before creating the virtualenv so that we can support
+ # substituting config values with other config values.
+ self.query_build_env()
+
+ # We need to create the virtualenv directly (without using an action) in
+ # order to use python modules in PreScriptRun/Action listeners
+ self.create_virtualenv()
+
+ def _pre_config_lock(self, rw_config):
+ c = self.config
+ cfg_files_and_dicts = rw_config.all_cfg_files_and_dicts
+ build_pool = c.get('build_pool', '')
+ build_variant = c.get('build_variant', '')
+ variant_cfg = ''
+ if build_variant:
+ variant_cfg = BuildOptionParser.build_variants[build_variant] % (
+ BuildOptionParser.platform,
+ BuildOptionParser.bits
+ )
+ build_pool_cfg = BuildOptionParser.build_pool_cfg_file
+ branch_cfg = BuildOptionParser.branch_cfg_file
+
+ cfg_match_msg = "Script was run with '%(option)s %(type)s' and \
+'%(type)s' matches a key in '%(type_config_file)s'. Updating self.config with \
+items from that key's value."
+ pf_override_msg = "The branch '%(branch)s' has custom behavior for the \
+platform '%(platform)s'. Updating self.config with the following from \
+'platform_overrides' found in '%(pf_cfg_file)s':"
+
+ for i, (target_file, target_dict) in enumerate(cfg_files_and_dicts):
+ if branch_cfg and branch_cfg in target_file:
+ self.info(
+ cfg_match_msg % {
+ 'option': '--branch',
+ 'type': c['branch'],
+ 'type_config_file': BuildOptionParser.branch_cfg_file
+ }
+ )
+ if build_pool_cfg and build_pool_cfg in target_file:
+ self.info(
+ cfg_match_msg % {
+ 'option': '--build-pool',
+ 'type': build_pool,
+ 'type_config_file': build_pool_cfg,
+ }
+ )
+ if variant_cfg and variant_cfg in target_file:
+ self.info(
+ cfg_match_msg % {
+ 'option': '--custom-build-variant-cfg',
+ 'type': build_variant,
+ 'type_config_file': variant_cfg,
+ }
+ )
+ if c.get("platform_overrides"):
+            if c['stage_platform'] in c['platform_overrides']:
+ self.info(
+ pf_override_msg % {
+ 'branch': c['branch'],
+ 'platform': c['stage_platform'],
+ 'pf_cfg_file': BuildOptionParser.branch_cfg_file
+ }
+ )
+ branch_pf_overrides = c['platform_overrides'][
+ c['stage_platform']
+ ]
+ self.info(pprint.pformat(branch_pf_overrides))
+ c.update(branch_pf_overrides)
+ self.info('To generate a config file based upon options passed and '
+ 'config files used, run script as before but extend options '
+ 'with "--dump-config"')
+ self.info('For a diff of where self.config got its items, '
+ 'run the script again as before but extend options with: '
+ '"--dump-config-hierarchy"')
+        self.info("Note that neither --dump-config nor --dump-config-hierarchy "
+                  "actually runs any actions.")
+
+ def _assert_cfg_valid_for_action(self, dependencies, action):
+ """ assert dependency keys are in config for given action.
+
+        Takes a list of dependencies and ensures that each has an
+        associated key in the config. Displays error messages as
+        appropriate.
+
+ """
+ # TODO add type and value checking, not just keys
+ # TODO solution should adhere to: bug 699343
+ # TODO add this to BaseScript when the above is done
+ # for now, let's just use this as a way to save typing...
+ c = self.config
+ undetermined_keys = []
+ err_template = "The key '%s' could not be determined \
+and is needed for the action '%s'. Please add this to your config \
+or run without that action (ie: --no-{action})"
+ for dep in dependencies:
+ if dep not in c:
+ undetermined_keys.append(dep)
+ if undetermined_keys:
+ fatal_msgs = [err_template % (key, action)
+ for key in undetermined_keys]
+ self.fatal("".join(fatal_msgs))
+ # otherwise:
+ return # all good
+
+ def _query_build_prop_from_app_ini(self, prop, app_ini_path=None):
+ dirs = self.query_abs_dirs()
+ print_conf_setting_path = os.path.join(dirs['abs_src_dir'],
+ 'config',
+ 'printconfigsetting.py')
+ if not app_ini_path:
+ # set the default
+ app_ini_path = dirs['abs_app_ini_path']
+ if (os.path.exists(print_conf_setting_path) and
+ os.path.exists(app_ini_path)):
+ python = self.query_exe('python2.7')
+ cmd = [
+ python, os.path.join(dirs['abs_src_dir'], 'mach'), 'python',
+ print_conf_setting_path, app_ini_path,
+ 'App', prop
+ ]
+ env = self.query_build_env()
+ # dirs['abs_obj_dir'] can be different from env['MOZ_OBJDIR'] on
+ # mac, and that confuses mach.
+ del env['MOZ_OBJDIR']
+ return self.get_output_from_command_m(cmd,
+ cwd=dirs['abs_obj_dir'], env=env)
+ else:
+ return None
+
+ def query_builduid(self):
+ c = self.config
+ if self.builduid:
+ return self.builduid
+
+ builduid = None
+ if c.get("is_automation"):
+ if self.buildbot_config['properties'].get('builduid'):
+ self.info("Determining builduid from buildbot properties")
+ builduid = self.buildbot_config['properties']['builduid'].encode(
+ 'ascii', 'replace'
+ )
+
+ if not builduid:
+ self.info("Creating builduid through uuid hex")
+ builduid = generate_build_UID()
+
+ if c.get('is_automation'):
+ self.set_buildbot_property('builduid',
+ builduid,
+ write_to_file=True)
+ self.builduid = builduid
+ return self.builduid
+
+ def query_buildid(self):
+ c = self.config
+ if self.buildid:
+ return self.buildid
+
+ buildid = None
+ if c.get("is_automation"):
+ if self.buildbot_config['properties'].get('buildid'):
+ self.info("Determining buildid from buildbot properties")
+ buildid = self.buildbot_config['properties']['buildid'].encode(
+ 'ascii', 'replace'
+ )
+ else:
+ # for taskcluster, there are no buildbot properties, and we pass
+ # MOZ_BUILD_DATE into mozharness as an environment variable, only
+ # to have it pass the same value out with the same name.
+ buildid = os.environ.get('MOZ_BUILD_DATE')
+
+ if not buildid:
+ self.info("Creating buildid through current time")
+ buildid = generate_build_ID()
+
+ if c.get('is_automation'):
+ self.set_buildbot_property('buildid',
+ buildid,
+ write_to_file=True)
+
+ self.buildid = buildid
+ return self.buildid
+
+ def _query_objdir(self):
+ if self.objdir:
+ return self.objdir
+
+ if not self.config.get('objdir'):
+ return self.fatal(MISSING_CFG_KEY_MSG % ('objdir',))
+ self.objdir = self.config['objdir']
+ return self.objdir
+
+ def _query_repo(self):
+ if self.repo_path:
+ return self.repo_path
+ c = self.config
+
+ # we actually supply the repo in mozharness so if it's in
+ # the config, we use that (automation does not require it in
+ # buildbot props)
+ if not c.get('repo_path'):
+ repo_path = 'projects/%s' % (self.branch,)
+ self.info(
+ "repo_path not in config. Using '%s' instead" % (repo_path,)
+ )
+ else:
+ repo_path = c['repo_path']
+ self.repo_path = '%s/%s' % (c['repo_base'], repo_path,)
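+        # e.g. repo_base 'https://hg.mozilla.org' and repo_path
+        # 'projects/cedar' yield 'https://hg.mozilla.org/projects/cedar'
+        # (values illustrative)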
+ return self.repo_path
+
+ def _skip_buildbot_specific_action(self):
+ """ ignore actions from buildbot's infra."""
+ self.info("This action is specific to buildbot's infrastructure")
+ self.info("Skipping......")
+ return
+
+ def query_is_nightly_promotion(self):
+ platform_enabled = self.config.get('enable_nightly_promotion')
+        branch_enabled = self.branch in self.config.get('nightly_promotion_branches', [])
+ return platform_enabled and branch_enabled
+
+ def query_build_env(self, **kwargs):
+ c = self.config
+
+        # let's invoke the base query_env and make a copy of it,
+        # as we don't always want every key below added to the same dict
+ env = copy.deepcopy(
+ super(BuildScript, self).query_env(**kwargs)
+ )
+
+ # first grab the buildid
+ env['MOZ_BUILD_DATE'] = self.query_buildid()
+
+ # Set the source repository to what we're building from since
+ # the default is to query `hg paths` which isn't reliable with pooled
+ # storage
+ repo_path = self._query_repo()
+ assert repo_path
+ env['MOZ_SOURCE_REPO'] = repo_path
+
+ if self.query_is_nightly() or self.query_is_nightly_promotion():
+ if self.query_is_nightly():
+ # nightly promotion needs to set update_channel but not do all the 'IS_NIGHTLY'
+ # automation parts like uploading symbols for now
+ env["IS_NIGHTLY"] = "yes"
+ # in branch_specifics.py we might set update_channel explicitly
+ if c.get('update_channel'):
+ env["MOZ_UPDATE_CHANNEL"] = c['update_channel']
+ else: # let's just give the generic channel based on branch
+ env["MOZ_UPDATE_CHANNEL"] = "nightly-%s" % (self.branch,)
+
+ if self.config.get('pgo_build') or self._compile_against_pgo():
+ env['MOZ_PGO'] = '1'
+
+ if c.get('enable_signing'):
+ if os.environ.get('MOZ_SIGNING_SERVERS'):
+ moz_sign_cmd = subprocess.list2cmdline(
+ self.query_moz_sign_cmd(formats=None)
+ )
+ # windows fix. This is passed to mach build env and we call that
+ # with python, not with bash so we need to fix the slashes here
+ env['MOZ_SIGN_CMD'] = moz_sign_cmd.replace('\\', '\\\\\\\\')
+ else:
+ self.warning("signing disabled because MOZ_SIGNING_SERVERS is not set")
+ elif 'MOZ_SIGN_CMD' in env:
+ # Ensure that signing is truly disabled
+ # MOZ_SIGN_CMD may be defined by default in buildbot (see MozillaBuildFactory)
+ self.warning("Clearing MOZ_SIGN_CMD because we don't have config['enable_signing']")
+ del env['MOZ_SIGN_CMD']
+
+        # to activate the right behaviour in mozconfigs while we transition
+ if c.get('enable_release_promotion'):
+ env['ENABLE_RELEASE_PROMOTION'] = "1"
+ update_channel = c.get('update_channel', self.branch)
+ self.info("Release promotion update channel: %s"
+ % (update_channel,))
+ env["MOZ_UPDATE_CHANNEL"] = update_channel
+
+ # we can't make env an attribute of self because env can change on
+ # every call for reasons like MOZ_SIGN_CMD
+ return env
+
+ def query_mach_build_env(self, multiLocale=None):
+ c = self.config
+ if multiLocale is None and self.query_is_nightly():
+ multiLocale = c.get('multi_locale', False)
+ mach_env = {}
+ if c.get('upload_env'):
+ mach_env.update(c['upload_env'])
+ if 'UPLOAD_HOST' in mach_env and 'stage_server' in c:
+ mach_env['UPLOAD_HOST'] = mach_env['UPLOAD_HOST'] % {
+ 'stage_server': c['stage_server']
+ }
+ if 'UPLOAD_USER' in mach_env and 'stage_username' in c:
+ mach_env['UPLOAD_USER'] = mach_env['UPLOAD_USER'] % {
+ 'stage_username': c['stage_username']
+ }
+ if 'UPLOAD_SSH_KEY' in mach_env and 'stage_ssh_key' in c:
+ mach_env['UPLOAD_SSH_KEY'] = mach_env['UPLOAD_SSH_KEY'] % {
+ 'stage_ssh_key': c['stage_ssh_key']
+ }
+
+ # this prevents taskcluster from overwriting the target files with
+ # the multilocale files. Put everything from the en-US build in a
+ # separate folder.
+ if multiLocale and self.config.get('taskcluster_nightly'):
+ if 'UPLOAD_PATH' in mach_env:
+ mach_env['UPLOAD_PATH'] = os.path.join(mach_env['UPLOAD_PATH'],
+ 'en-US')
+
+        # _query_post_upload_cmd returns a list (a cmd list); for the env's
+        # sake let's make it a string here
+ if c.get('is_automation'):
+ pst_up_cmd = ' '.join([str(i) for i in self._query_post_upload_cmd(multiLocale)])
+ mach_env['POST_UPLOAD_CMD'] = pst_up_cmd
+
+ return mach_env
+
+    def _compile_against_pgo(self):
+        """determines whether a build should be run with pgo even if it is
+        not classified as a 'pgo build'.
+
+ requirements:
+ 1) must be a platform that can run against pgo
+ 2) either:
+ a) must be a nightly build
+            b) must be on a branch that runs pgo every time it can
+ """
+ c = self.config
+ if self.stage_platform in c['pgo_platforms']:
+ if c.get('branch_uses_per_checkin_strategy') or self.query_is_nightly():
+ return True
+ return False
+
+ def query_check_test_env(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ check_test_env = {}
+ if c.get('check_test_env'):
+ for env_var, env_value in c['check_test_env'].iteritems():
+ check_test_env[env_var] = env_value % dirs
+ return check_test_env
+
+ def _query_post_upload_cmd(self, multiLocale):
+ c = self.config
+ post_upload_cmd = ["post_upload.py"]
+ buildid = self.query_buildid()
+ revision = self.query_revision()
+ platform = self.stage_platform
+ who = self.query_who()
+ if c.get('pgo_build'):
+ platform += '-pgo'
+
+ if c.get('tinderbox_build_dir'):
+ # TODO find out if we should fail here like we are
+ if not who and not revision:
+ self.fatal("post upload failed. --tinderbox-builds-dir could "
+ "not be determined. 'who' and/or 'revision' unknown")
+            # branches like try will use 'tinderbox_build_dir'
+ tinderbox_build_dir = c['tinderbox_build_dir'] % {
+ 'who': who,
+ 'got_revision': revision
+ }
+ else:
+ # the default
+ tinderbox_build_dir = "%s-%s" % (self.branch, platform)
+
+ if who and self.branch == 'try':
+ post_upload_cmd.extend(["--who", who])
+ if c.get('include_post_upload_builddir'):
+ post_upload_cmd.extend(
+ ["--builddir", "%s-%s" % (self.branch, platform)]
+ )
+ elif multiLocale:
+ # Android builds with multilocale enabled upload the en-US builds
+ # to an en-US subdirectory, and the multilocale builds to the
+ # top-level directory.
+ post_upload_cmd.extend(
+ ["--builddir", "en-US"]
+ )
+
+ post_upload_cmd.extend(["--tinderbox-builds-dir", tinderbox_build_dir])
+ post_upload_cmd.extend(["-p", c['stage_product']])
+ post_upload_cmd.extend(['-i', buildid])
+ if revision:
+ post_upload_cmd.extend(['--revision', revision])
+ if c.get('to_tinderbox_dated'):
+ post_upload_cmd.append('--release-to-tinderbox-dated-builds')
+ if c.get('release_to_try_builds'):
+ post_upload_cmd.append('--release-to-try-builds')
+ if self.query_is_nightly():
+ if c.get('post_upload_include_platform'):
+ post_upload_cmd.extend(['-b', '%s-%s' % (self.branch, platform)])
+ else:
+ post_upload_cmd.extend(['-b', self.branch])
+ post_upload_cmd.append('--release-to-dated')
+ if c['platform_supports_post_upload_to_latest']:
+ post_upload_cmd.append('--release-to-latest')
+ post_upload_cmd.extend(c.get('post_upload_extra', []))
+
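+        # A typical default-branch command assembled above might read
+        # (values illustrative):
+        #   post_upload.py --tinderbox-builds-dir mozilla-central-linux64
+        #                  -p firefox -i 20170315103045 --revision abcdef123456
+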
+ return post_upload_cmd
+
+ def _ccache_z(self):
+ """clear ccache stats."""
+ dirs = self.query_abs_dirs()
+ env = self.query_build_env()
+ self.run_command(command=['ccache', '-z'],
+ cwd=dirs['base_work_dir'],
+ env=env)
+
+ def _ccache_s(self):
+ """print ccache stats. only done for unix like platforms"""
+ dirs = self.query_abs_dirs()
+ env = self.query_build_env()
+ cmd = ['ccache', '-s']
+ self.run_command(cmd, cwd=dirs['abs_src_dir'], env=env)
+
+ def _rm_old_package(self):
+ """rm the old package."""
+ c = self.config
+ dirs = self.query_abs_dirs()
+ old_package_paths = []
+ old_package_patterns = c.get('old_packages')
+
+ self.info("removing old packages...")
+ if os.path.exists(dirs['abs_obj_dir']):
+ for product in old_package_patterns:
+ old_package_paths.extend(
+ glob.glob(product % {"objdir": dirs['abs_obj_dir']})
+ )
+ if old_package_paths:
+ for package_path in old_package_paths:
+ self.rmtree(package_path)
+ else:
+            self.info("There weren't any old packages to remove.")
+
+ def _get_mozconfig(self):
+ """assign mozconfig."""
+ c = self.config
+ dirs = self.query_abs_dirs()
+ abs_mozconfig_path = ''
+
+ # first determine the mozconfig path
+ if c.get('src_mozconfig') and not c.get('src_mozconfig_manifest'):
+ self.info('Using in-tree mozconfig')
+ abs_mozconfig_path = os.path.join(dirs['abs_src_dir'], c.get('src_mozconfig'))
+ elif c.get('src_mozconfig_manifest') and not c.get('src_mozconfig'):
+ self.info('Using mozconfig based on manifest contents')
+ manifest = os.path.join(dirs['abs_work_dir'], c['src_mozconfig_manifest'])
+ if not os.path.exists(manifest):
+ self.fatal('src_mozconfig_manifest: "%s" not found. Does it exist?' % (manifest,))
+ with self.opened(manifest, error_level=ERROR) as (fh, err):
+ if err:
+                    self.fatal("%s exists but its properties could not be read" % manifest)
+ abs_mozconfig_path = os.path.join(dirs['abs_src_dir'], json.load(fh)['gecko_path'])
+ else:
+            self.fatal("Exactly one of 'src_mozconfig' and 'src_mozconfig_manifest' "
+                       "must be in the config in order to determine the mozconfig.")
+
+ # print its contents
+ content = self.read_from_file(abs_mozconfig_path, error_level=FATAL)
+ self.info("mozconfig content:")
+ self.info(content)
+
+ # finally, copy the mozconfig to a path that 'mach build' expects it to be
+ self.copyfile(abs_mozconfig_path, os.path.join(dirs['abs_src_dir'], '.mozconfig'))
+
+ # TODO: replace with ToolToolMixin
+ def _get_tooltool_auth_file(self):
+ # set the default authentication file based on platform; this
+ # corresponds to where puppet puts the token
+ if 'tooltool_authentication_file' in self.config:
+ fn = self.config['tooltool_authentication_file']
+ elif self._is_windows():
+ fn = r'c:\builds\relengapi.tok'
+ else:
+ fn = '/builds/relengapi.tok'
+
+ # if the file doesn't exist, don't pass it to tooltool (it will just
+ # fail). In taskcluster, this will work OK as the relengapi-proxy will
+ # take care of auth. Everywhere else, we'll get auth failures if
+ # necessary.
+ if os.path.exists(fn):
+ return fn
+
+ def _run_tooltool(self):
+ self._assert_cfg_valid_for_action(
+ ['tooltool_script', 'tooltool_bootstrap', 'tooltool_url'],
+ 'build'
+ )
+ c = self.config
+ dirs = self.query_abs_dirs()
+ if not c.get('tooltool_manifest_src'):
+ return self.warning(ERROR_MSGS['tooltool_manifest_undetermined'])
+ fetch_script_path = os.path.join(dirs['abs_tools_dir'],
+ 'scripts',
+ 'tooltool',
+ 'tooltool_wrapper.sh')
+ tooltool_manifest_path = os.path.join(dirs['abs_src_dir'],
+ c['tooltool_manifest_src'])
+ cmd = [
+ 'sh',
+ fetch_script_path,
+ tooltool_manifest_path,
+ c['tooltool_url'],
+ c['tooltool_bootstrap'],
+ ]
+ cmd.extend(c['tooltool_script'])
+ auth_file = self._get_tooltool_auth_file()
+ if auth_file:
+ cmd.extend(['--authentication-file', auth_file])
+ cache = c['env'].get('TOOLTOOL_CACHE')
+ if cache:
+ cmd.extend(['-c', cache])
+ self.info(str(cmd))
+ self.run_command_m(cmd, cwd=dirs['abs_src_dir'], halt_on_failure=True)
+
+ def query_revision(self, source_path=None):
+ """ returns the revision of the build
+
+ first will look for it in buildbot_properties and then in
+ buildbot_config. Failing that, it will actually poll the source of
+ the repo if it exists yet.
+
+ This method is used both to figure out what revision to check out and
+ to figure out what revision *was* checked out.
+ """
+ revision = None
+ if 'revision' in self.buildbot_properties:
+ revision = self.buildbot_properties['revision']
+ elif (self.buildbot_config and
+ self.buildbot_config.get('sourcestamp', {}).get('revision')):
+ revision = self.buildbot_config['sourcestamp']['revision']
+ elif self.buildbot_config and self.buildbot_config.get('revision'):
+ revision = self.buildbot_config['revision']
+ else:
+ if not source_path:
+ dirs = self.query_abs_dirs()
+ source_path = dirs['abs_src_dir'] # let's take the default
+
+ # Look at what we have checked out
+ if os.path.exists(source_path):
+ hg = self.query_exe('hg', return_type='list')
+ revision = self.get_output_from_command(
+ hg + ['parent', '--template', '{node}'], cwd=source_path
+ )
+ return revision.encode('ascii', 'replace') if revision else None
+
+ def _checkout_source(self):
+ """use vcs_checkout to grab source needed for build."""
+ # TODO make this method its own action
+ c = self.config
+ dirs = self.query_abs_dirs()
+ repo = self._query_repo()
+ vcs_checkout_kwargs = {
+ 'repo': repo,
+ 'dest': dirs['abs_src_dir'],
+ 'revision': self.query_revision(),
+ 'env': self.query_build_env()
+ }
+ if c.get('clone_by_revision'):
+ vcs_checkout_kwargs['clone_by_revision'] = True
+
+ if c.get('clone_with_purge'):
+ vcs_checkout_kwargs['clone_with_purge'] = True
+ vcs_checkout_kwargs['clone_upstream_url'] = c.get('clone_upstream_url')
+ rev = self.vcs_checkout(**vcs_checkout_kwargs)
+ if c.get('is_automation'):
+ changes = self.buildbot_config['sourcestamp']['changes']
+ if changes:
+ comments = changes[0].get('comments', '')
+ self.set_buildbot_property('comments',
+ comments,
+ write_to_file=True)
+ else:
+ self.warning(ERROR_MSGS['comments_undetermined'])
+ self.set_buildbot_property('got_revision',
+ rev,
+ write_to_file=True)
+
+ def _count_ctors(self):
+ """count num of ctors and set testresults."""
+ dirs = self.query_abs_dirs()
+ python_path = os.path.join(dirs['abs_work_dir'], 'venv', 'bin',
+ 'python')
+ abs_count_ctors_path = os.path.join(dirs['abs_src_dir'],
+ 'build',
+ 'util',
+ 'count_ctors.py')
+ abs_libxul_path = os.path.join(dirs['abs_obj_dir'],
+ 'dist',
+ 'bin',
+ 'libxul.so')
+
+ cmd = [python_path, abs_count_ctors_path, abs_libxul_path]
+ self.get_output_from_command(cmd, cwd=dirs['abs_src_dir'],
+ throw_exception=True)
+
+ def _generate_properties_file(self, path):
+        # TODO it would be better to grab all the properties that were
+        # persisted to file rather than use what's in the buildbot_properties
+        # live object, so we become less action dependent.
+ all_current_props = dict(
+ chain(self.buildbot_config['properties'].items(),
+ self.buildbot_properties.items())
+ )
+ # graph_server_post.py expects a file with 'properties' key
+ graph_props = dict(properties=all_current_props)
+ self.dump_config(path, graph_props)
+
+ def _query_props_set_by_mach(self, console_output=True, error_level=FATAL):
+ mach_properties_path = os.path.join(
+ self.query_abs_dirs()['abs_obj_dir'], 'dist', 'mach_build_properties.json'
+ )
+ self.info("setting properties set by mach build. Looking in path: %s"
+ % mach_properties_path)
+ if os.path.exists(mach_properties_path):
+ with self.opened(mach_properties_path, error_level=error_level) as (fh, err):
+ build_props = json.load(fh)
+ if err:
+ self.log("%s exists but there was an error reading the "
+ "properties. props: `%s` - error: "
+ "`%s`" % (mach_properties_path,
+ build_props or 'None',
+ err or 'No error'),
+ error_level)
+ if console_output:
+ self.info("Properties set from 'mach build'")
+ self.info(pprint.pformat(build_props))
+ for key, prop in build_props.iteritems():
+ if prop != 'UNKNOWN':
+ self.set_buildbot_property(key, prop, write_to_file=True)
+ else:
+ self.info("No mach_build_properties.json found - not importing properties.")
+
+ def generate_build_props(self, console_output=True, halt_on_failure=False):
+ """sets props found from mach build and, in addition, buildid,
+ sourcestamp, appVersion, and appName."""
+
+ error_level = ERROR
+ if halt_on_failure:
+ error_level = FATAL
+
+ if self.generated_build_props:
+ return
+
+ # grab props set by mach if any
+ self._query_props_set_by_mach(console_output=console_output,
+ error_level=error_level)
+
+ dirs = self.query_abs_dirs()
+ print_conf_setting_path = os.path.join(dirs['abs_src_dir'],
+ 'config',
+ 'printconfigsetting.py')
+ if (not os.path.exists(print_conf_setting_path) or
+ not os.path.exists(dirs['abs_app_ini_path'])):
+ self.log("Can't set the following properties: "
+ "buildid, sourcestamp, appVersion, and appName. "
+ "Required paths missing. Verify both %s and %s "
+ "exist. These paths require the 'build' action to be "
+ "run prior to this" % (print_conf_setting_path,
+ dirs['abs_app_ini_path']),
+ level=error_level)
+ self.info("Setting properties found in: %s" % dirs['abs_app_ini_path'])
+ python = self.query_exe('python2.7')
+ base_cmd = [
+ python, os.path.join(dirs['abs_src_dir'], 'mach'), 'python',
+ print_conf_setting_path, dirs['abs_app_ini_path'], 'App'
+ ]
+ properties_needed = [
+ {'ini_name': 'SourceStamp', 'prop_name': 'sourcestamp'},
+ {'ini_name': 'Version', 'prop_name': 'appVersion'},
+ {'ini_name': 'Name', 'prop_name': 'appName'}
+ ]
+ env = self.query_build_env()
+ # dirs['abs_obj_dir'] can be different from env['MOZ_OBJDIR'] on
+ # mac, and that confuses mach.
+ del env['MOZ_OBJDIR']
+ for prop in properties_needed:
+ prop_val = self.get_output_from_command_m(
+ base_cmd + [prop['ini_name']], cwd=dirs['abs_obj_dir'],
+ halt_on_failure=halt_on_failure, env=env
+ )
+ self.set_buildbot_property(prop['prop_name'],
+ prop_val,
+ write_to_file=True)
+
+ if self.config.get('is_automation'):
+ self.info("Verifying buildid from application.ini matches buildid "
+ "from buildbot")
+ app_ini_buildid = self._query_build_prop_from_app_ini('BuildID')
+            # it would be hard to imagine query_buildid evaluating to a falsy
+            # value (e.g. 0), but in case it does, force it to None
+ buildbot_buildid = self.query_buildid() or None
+ self.info(
+ 'buildid from application.ini: "%s". buildid from buildbot '
+ 'properties: "%s"' % (app_ini_buildid, buildbot_buildid)
+ )
+            if app_ini_buildid == buildbot_buildid and buildbot_buildid is not None:
+ self.info('buildids match.')
+ else:
+ self.error(
+ 'buildids do not match or values could not be determined'
+ )
+ # set the build to orange if not already worse
+ self.return_code = self.worst_level(
+ EXIT_STATUS_DICT[TBPL_WARNING], self.return_code,
+ AUTOMATION_EXIT_CODES[::-1]
+ )
+
+ self.generated_build_props = True
+
+ def _initialize_taskcluster(self):
+ if self.client_id and self.access_token:
+ # Already initialized
+ return
+
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
+ credentials = {}
+ execfile(auth, credentials)
+ self.client_id = credentials.get('taskcluster_clientId')
+ self.access_token = credentials.get('taskcluster_accessToken')
+
+ # We need to create & activate the virtualenv so that we can import
+ # taskcluster (and its dependent modules, like requests and hawk).
+        # Normally we could create the virtualenv as an action, but due to
+        # some odd dependencies (query_build_env() being called from
+        # build()), the virtualenv must already exist, so we create it here.
+ self.create_virtualenv()
+ self.activate_virtualenv()
+
+ routes_file = os.path.join(dirs['abs_src_dir'],
+ 'testing',
+ 'mozharness',
+ 'configs',
+ 'routes.json')
+ with open(routes_file) as f:
+ self.routes_json = json.load(f)
+
+ def _taskcluster_upload(self, files, templates, locale='en-US',
+ property_conditions=[]):
+ if not self.client_id or not self.access_token:
+ self.warning('Skipping S3 file upload: No taskcluster credentials.')
+ return
+
+ dirs = self.query_abs_dirs()
+ repo = self._query_repo()
+ revision = self.query_revision()
+ pushinfo = self.vcs_query_pushinfo(repo, revision)
+ pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))
+
+ index = self.config.get('taskcluster_index', 'index.garbage.staging')
+ fmt = {
+ 'index': index,
+ 'project': self.buildbot_config['properties']['branch'],
+ 'head_rev': revision,
+ 'pushdate': pushdate,
+ 'year': pushdate[0:4],
+ 'month': pushdate[4:6],
+ 'day': pushdate[6:8],
+ 'build_product': self.config['stage_product'],
+ 'build_name': self.query_build_name(),
+ 'build_type': self.query_build_type(),
+ 'locale': locale,
+ }
+ fmt.update(self.buildid_to_dict(self.query_buildid()))
+ routes = []
+ for template in templates:
+ routes.append(template.format(**fmt))
+ self.info("Using routes: %s" % routes)
+
+ tc = Taskcluster(
+ branch=self.branch,
+ rank=pushinfo.pushdate, # Use pushdate as the rank
+ client_id=self.client_id,
+ access_token=self.access_token,
+ log_obj=self.log_obj,
+ # `upload_to_task_id` is used by mozci to have access to where the artifacts
+ # will be uploaded
+ task_id=self.buildbot_config['properties'].get('upload_to_task_id'),
+ )
+
+ # TODO: Bug 1165980 - these should be in tree
+ routes.extend([
+ "%s.buildbot.branches.%s.%s" % (index, self.branch, self.stage_platform),
+ "%s.buildbot.revisions.%s.%s.%s" % (index, revision, self.branch, self.stage_platform),
+ ])
+ task = tc.create_task(routes)
+ tc.claim_task(task)
+
+ # Only those files uploaded with valid extensions are processed.
+ # This ensures that we get the correct packageUrl from the list.
+ valid_extensions = (
+ '.apk',
+ '.dmg',
+ '.mar',
+ '.rpm',
+ '.tar.bz2',
+ '.tar.gz',
+ '.zip',
+ '.json',
+ )
+
+ for upload_file in files:
+ # Create an S3 artifact for each file that gets uploaded. We also
+ # check the uploaded file against the property conditions so that we
+ # can set the buildbot config with the correct URLs for package
+ # locations.
+ tc.create_artifact(task, upload_file)
+ if upload_file.endswith(valid_extensions):
+ for prop, condition in property_conditions:
+ if condition(upload_file):
+ self.set_buildbot_property(prop, tc.get_taskcluster_url(upload_file))
+ break
+
+ # Upload a file with all Buildbot properties
+        # This is necessary for Buildbot Bridge test jobs to work properly
+ # until we can migrate to TaskCluster
+ properties_path = os.path.join(
+ dirs['base_work_dir'],
+ 'buildbot_properties.json'
+ )
+ self._generate_properties_file(properties_path)
+ tc.create_artifact(task, properties_path)
+
+ tc.report_completed(task)
+
+ def upload_files(self):
+ self._initialize_taskcluster()
+ dirs = self.query_abs_dirs()
+
+ if self.query_is_nightly():
+ templates = self.routes_json['nightly']
+
+ # Nightly builds with l10n counterparts also publish to the
+ # 'en-US' locale.
+ if self.config.get('publish_nightly_en_US_routes'):
+ templates.extend(self.routes_json['l10n'])
+ else:
+ templates = self.routes_json['routes']
+
+ # Some trees may not be setting uploadFiles, so default to []. Normally
+ # we'd only expect to get here if the build completes successfully,
+ # which means we should have uploadFiles.
+ files = self.query_buildbot_property('uploadFiles') or []
+ if not files:
+ self.warning('No files from the build system to upload to S3: uploadFiles property is missing or empty.')
+
+ packageName = self.query_buildbot_property('packageFilename')
+ self.info('packageFilename is: %s' % packageName)
+
+ if self.config.get('use_package_as_marfile'):
+ self.info('Using packageUrl for the MAR file')
+ self.set_buildbot_property('completeMarUrl',
+ self.query_buildbot_property('packageUrl'),
+ write_to_file=True)
+
+ # Find the full path to the package in uploadFiles so we can
+ # get the size/hash of the mar
+ for upload_file in files:
+ if upload_file.endswith(packageName):
+ self.set_buildbot_property('completeMarSize',
+ self.query_filesize(upload_file),
+ write_to_file=True)
+ self.set_buildbot_property('completeMarHash',
+ self.query_sha512sum(upload_file),
+ write_to_file=True)
+ break
+
+ property_conditions = [
+ # key: property name, value: condition
+ ('symbolsUrl', lambda m: m.endswith('crashreporter-symbols.zip') or
+ m.endswith('crashreporter-symbols-full.zip')),
+ ('testsUrl', lambda m: m.endswith(('tests.tar.bz2', 'tests.zip'))),
+ ('robocopApkUrl', lambda m: m.endswith('apk') and 'robocop' in m),
+ ('jsshellUrl', lambda m: 'jsshell-' in m and m.endswith('.zip')),
+ # Temporarily use "TC" in MarUrl parameters. We don't want to
+ # override these to point to taskcluster just yet, and still
+ # need to use FTP. However, they can't be removed outright since
+ # that can affect packageUrl. See bug 1144985.
+ ('completeMarUrlTC', lambda m: m.endswith('.complete.mar')),
+ ('partialMarUrlTC', lambda m: m.endswith('.mar') and '.partial.' in m),
+ ('codeCoverageURL', lambda m: m.endswith('code-coverage-gcno.zip')),
+ ('sdkUrl', lambda m: m.endswith(('sdk.tar.bz2', 'sdk.zip'))),
+ ('testPackagesUrl', lambda m: m.endswith('test_packages.json')),
+ ('packageUrl', lambda m: m.endswith(packageName)),
+ ]
+
+ # Also upload our mozharness log files
+ files.extend([os.path.join(self.log_obj.abs_log_dir, x) for x in self.log_obj.log_files.values()])
+
+ # Also upload our buildprops.json file.
+ files.extend([os.path.join(dirs['base_work_dir'], 'buildprops.json')])
+
+ self._taskcluster_upload(files, templates,
+ property_conditions=property_conditions)
+
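For illustration, property_conditions acts as a first-match dispatch table: each uploaded filename is tried against the conditions in order, and the first hit decides which buildbot property receives the taskcluster URL. A minimal standalone sketch of that logic (the filename below is invented):

    def first_matching_property(property_conditions, filename):
        # Walk the (property, predicate) pairs in order; first hit wins.
        for prop, condition in property_conditions:
            if condition(filename):
                return prop
        return None

    conditions = [
        ('testsUrl', lambda m: m.endswith(('tests.tar.bz2', 'tests.zip'))),
        ('completeMarUrlTC', lambda m: m.endswith('.complete.mar')),
    ]
    # hypothetical artifact name, for demonstration only
    print(first_matching_property(conditions, 'firefox-55.0.en-US.linux-x86_64.complete.mar'))
    # -> completeMarUrlTC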
+ def _set_file_properties(self, file_name, find_dir, prop_type,
+ error_level=ERROR):
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+        # Windows fix: the backslashes are consumed twice (once by bash -c
+        # and once by find), so each one has to be escaped twice over.
+ find_dir = find_dir.replace('\\', '\\\\\\\\')
+
+ error_msg = "Not setting props: %s{Filename, Size, Hash}" % prop_type
+ cmd = ["bash", "-c",
+ "find %s -maxdepth 1 -type f -name %s" % (find_dir, file_name)]
+ file_path = self.get_output_from_command(cmd, dirs['abs_work_dir'])
+ if not file_path:
+ self.error(error_msg)
+ self.error("Can't determine filepath with cmd: %s" % (str(cmd),))
+ return
+
+ cmd = [
+ self.query_exe('openssl'), 'dgst',
+ '-%s' % (c.get("hash_type", "sha512"),), file_path
+ ]
+ hash_prop = self.get_output_from_command(cmd, dirs['abs_work_dir'])
+ if not hash_prop:
+ self.log("undetermined hash_prop with cmd: %s" % (str(cmd),),
+ level=error_level)
+ self.log(error_msg, level=error_level)
+ return
+ self.set_buildbot_property(prop_type + 'Filename',
+ os.path.split(file_path)[1],
+ write_to_file=True)
+ self.set_buildbot_property(prop_type + 'Size',
+ os.path.getsize(file_path),
+ write_to_file=True)
+ self.set_buildbot_property(prop_type + 'Hash',
+ hash_prop.strip().split(' ', 2)[1],
+ write_to_file=True)
+
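The Hash property above depends on the shape of `openssl dgst` output, a single line of the form "TYPE(filename)= hexdigest"; splitting on the first space keeps the digest. A small sketch, assuming that output format:

    # hypothetical `openssl dgst -sha512 firefox.tar.bz2` output line
    sample = "SHA512(firefox.tar.bz2)= 3a5b0c..."
    digest = sample.strip().split(' ', 2)[1]
    print(digest)  # -> 3a5b0c...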
+ def clone_tools(self):
+ """clones the tools repo."""
+ self._assert_cfg_valid_for_action(['tools_repo'], 'clone_tools')
+ c = self.config
+ dirs = self.query_abs_dirs()
+ repo = {
+ 'repo': c['tools_repo'],
+ 'vcs': 'hg',
+ 'dest': dirs['abs_tools_dir'],
+ 'output_timeout': 1200,
+ }
+ self.vcs_checkout(**repo)
+
+ def _create_mozbuild_dir(self, mozbuild_path=None):
+ if not mozbuild_path:
+ env = self.query_build_env()
+ mozbuild_path = env.get('MOZBUILD_STATE_PATH')
+ if mozbuild_path:
+ self.mkdir_p(mozbuild_path)
+ else:
+ self.warning("mozbuild_path could not be determined. skipping "
+ "creating it.")
+
+ def checkout_sources(self):
+ self._checkout_source()
+
+ def preflight_build(self):
+ """set up machine state for a complete build."""
+ c = self.config
+ if c.get('enable_ccache'):
+ self._ccache_z()
+ if not self.query_is_nightly():
+            # the old package should live in the source dir, so we don't
+            # need to do this for nightlies since we clobber the whole
+            # work_dir in clobber()
+ self._rm_old_package()
+ self._get_mozconfig()
+ self._run_tooltool()
+ self._create_mozbuild_dir()
+ mach_props = os.path.join(
+ self.query_abs_dirs()['abs_obj_dir'], 'dist', 'mach_build_properties.json'
+ )
+ if os.path.exists(mach_props):
+ self.info("Removing previous mach property file: %s" % mach_props)
+ self.rmtree(mach_props)
+
+ def build(self):
+ """builds application."""
+ env = self.query_build_env()
+ env.update(self.query_mach_build_env())
+
+ # XXX Bug 1037883 - mozconfigs can not find buildprops.json when builds
+ # are through mozharness. This is not pretty but it is a stopgap
+ # until an alternative solution is made or all builds that touch
+ # mozconfig.cache are converted to mozharness.
+ dirs = self.query_abs_dirs()
+ buildprops = os.path.join(dirs['base_work_dir'], 'buildprops.json')
+ # not finding buildprops is not an error outside of buildbot
+ if os.path.exists(buildprops):
+ self.copyfile(
+ buildprops,
+ os.path.join(dirs['abs_work_dir'], 'buildprops.json'))
+
+ # use mh config override for mach build wrapper, if it exists
+ python = self.query_exe('python2.7')
+ default_mach_build = [python, 'mach', '--log-no-times', 'build', '-v']
+ mach_build = self.query_exe('mach-build', default=default_mach_build)
+ return_code = self.run_command_m(
+ command=mach_build,
+ cwd=dirs['abs_src_dir'],
+ env=env,
+ output_timeout=self.config.get('max_build_output_timeout', 60 * 40)
+ )
+ if return_code:
+ self.return_code = self.worst_level(
+ EXIT_STATUS_DICT[TBPL_FAILURE], self.return_code,
+ AUTOMATION_EXIT_CODES[::-1]
+ )
+ self.fatal("'mach build' did not run successfully. Please check "
+ "log for errors.")
+
+ def multi_l10n(self):
+ if not self.query_is_nightly():
+ self.info("Not a nightly build, skipping multi l10n.")
+ return
+ self._initialize_taskcluster()
+
+ dirs = self.query_abs_dirs()
+ base_work_dir = dirs['base_work_dir']
+ objdir = dirs['abs_obj_dir']
+ branch = self.branch
+
+ # Building a nightly with the try repository fails because a
+ # config-file does not exist for try. Default to mozilla-central
+ # settings (arbitrarily).
+ if branch == 'try':
+ branch = 'mozilla-central'
+
+        # Some android versions share the same .json config - if
+        # multi_locale_config_platform is set, use that for the .json name;
+        # otherwise, use the buildbot platform.
+ default_platform = self.buildbot_config['properties'].get('platform',
+ 'android')
+
+ multi_config_pf = self.config.get('multi_locale_config_platform',
+ default_platform)
+
+ # The l10n script location differs on buildbot and taskcluster
+ if self.config.get('taskcluster_nightly'):
+ multil10n_path = \
+ 'build/src/testing/mozharness/scripts/multil10n.py'
+ base_work_dir = os.path.join(base_work_dir, 'workspace')
+ else:
+            multil10n_path = '%s/scripts/scripts/multil10n.py' % base_work_dir
+
+ cmd = [
+ self.query_exe('python'),
+ multil10n_path,
+ '--config-file',
+ 'multi_locale/%s_%s.json' % (branch, multi_config_pf),
+ '--config-file',
+ 'multi_locale/android-mozharness-build.json',
+ '--merge-locales',
+ '--pull-locale-source',
+ '--add-locales',
+ '--package-multi',
+ '--summary',
+ ]
+
+ self.run_command_m(cmd, env=self.query_build_env(), cwd=base_work_dir,
+ halt_on_failure=True)
+
+ package_cmd = [
+ 'make',
+ 'echo-variable-PACKAGE',
+ 'AB_CD=multi',
+ ]
+ package_filename = self.get_output_from_command_m(
+ package_cmd,
+ cwd=objdir,
+ )
+ if not package_filename:
+ self.fatal("Unable to determine the package filename for the multi-l10n build. Was trying to run: %s" % package_cmd)
+
+ self.info('Multi-l10n package filename is: %s' % package_filename)
+
+ parser = MakeUploadOutputParser(config=self.config,
+ log_obj=self.log_obj,
+ use_package_as_marfile=True,
+ package_filename=package_filename,
+ )
+ upload_cmd = ['make', 'upload', 'AB_CD=multi']
+ self.run_command_m(upload_cmd,
+ env=self.query_mach_build_env(multiLocale=False),
+ cwd=objdir, halt_on_failure=True,
+ output_parser=parser)
+ for prop in parser.matches:
+ self.set_buildbot_property(prop,
+ parser.matches[prop],
+ write_to_file=True)
+ upload_files_cmd = [
+ 'make',
+ 'echo-variable-UPLOAD_FILES',
+ 'AB_CD=multi',
+ ]
+ output = self.get_output_from_command_m(
+ upload_files_cmd,
+ cwd=objdir,
+ )
+ files = shlex.split(output)
+ abs_files = [os.path.abspath(os.path.join(objdir, f)) for f in files]
+ self._taskcluster_upload(abs_files, self.routes_json['l10n'],
+ locale='multi')
+
+ def postflight_build(self, console_output=True):
+ """grabs properties from post build and calls ccache -s"""
+ self.generate_build_props(console_output=console_output,
+ halt_on_failure=True)
+ if self.config.get('enable_ccache'):
+ self._ccache_s()
+
+ # A list of argument lists. Better names gratefully accepted!
+ mach_commands = self.config.get('postflight_build_mach_commands', [])
+ for mach_command in mach_commands:
+ self._execute_postflight_build_mach_command(mach_command)
+
+ def _execute_postflight_build_mach_command(self, mach_command_args):
+ env = self.query_build_env()
+ env.update(self.query_mach_build_env())
+ python = self.query_exe('python2.7')
+
+ command = [python, 'mach', '--log-no-times']
+ command.extend(mach_command_args)
+
+ self.run_command_m(
+ command=command,
+ cwd=self.query_abs_dirs()['abs_src_dir'],
+ env=env, output_timeout=self.config.get('max_build_output_timeout', 60 * 20),
+ halt_on_failure=True,
+ )
+
+ def preflight_package_source(self):
+ self._get_mozconfig()
+
+ def package_source(self):
+ """generates source archives and uploads them"""
+ env = self.query_build_env()
+ env.update(self.query_mach_build_env())
+ python = self.query_exe('python2.7')
+ dirs = self.query_abs_dirs()
+
+ self.run_command_m(
+ command=[python, 'mach', '--log-no-times', 'configure'],
+ cwd=dirs['abs_src_dir'],
+ env=env, output_timeout=60*3, halt_on_failure=True,
+ )
+ self.run_command_m(
+ command=[
+ 'make', 'source-package', 'hg-bundle', 'source-upload',
+ 'HG_BUNDLE_REVISION=%s' % self.query_revision(),
+ 'UPLOAD_HG_BUNDLE=1',
+ ],
+ cwd=dirs['abs_obj_dir'],
+ env=env, output_timeout=60*45, halt_on_failure=True,
+ )
+
+ def generate_source_signing_manifest(self):
+ """Sign source checksum file"""
+ env = self.query_build_env()
+ env.update(self.query_mach_build_env())
+ if env.get("UPLOAD_HOST") != "localhost":
+ self.warning("Skipping signing manifest generation. Set "
+ "UPLOAD_HOST to `localhost' to enable.")
+ return
+
+ if not env.get("UPLOAD_PATH"):
+ self.warning("Skipping signing manifest generation. Set "
+ "UPLOAD_PATH to enable.")
+ return
+
+ dirs = self.query_abs_dirs()
+ objdir = dirs['abs_obj_dir']
+
+ output = self.get_output_from_command_m(
+ command=['make', 'echo-variable-SOURCE_CHECKSUM_FILE'],
+ cwd=objdir,
+ )
+ files = shlex.split(output)
+ abs_files = [os.path.abspath(os.path.join(objdir, f)) for f in files]
+ manifest_file = os.path.join(env["UPLOAD_PATH"],
+ "signing_manifest.json")
+ self.write_to_file(manifest_file,
+ self.generate_signing_manifest(abs_files))
+
+ def check_test(self):
+ if self.config.get('forced_artifact_build'):
+ self.info('Skipping due to forced artifact build.')
+ return
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ env = self.query_build_env()
+ env.update(self.query_check_test_env())
+
+ if c.get('enable_pymake'): # e.g. windows
+ pymake_path = os.path.join(dirs['abs_src_dir'], 'build',
+ 'pymake', 'make.py')
+ cmd = ['python', pymake_path]
+ else:
+ cmd = ['make']
+ cmd.extend(['-k', 'check'])
+
+ parser = CheckTestCompleteParser(config=c,
+ log_obj=self.log_obj)
+ return_code = self.run_command_m(command=cmd,
+ cwd=dirs['abs_obj_dir'],
+ env=env,
+ output_parser=parser)
+ tbpl_status = parser.evaluate_parser(return_code)
+ return_code = EXIT_STATUS_DICT[tbpl_status]
+
+ if return_code:
+ self.return_code = self.worst_level(
+ return_code, self.return_code,
+ AUTOMATION_EXIT_CODES[::-1]
+ )
+ self.error("'make -k check' did not run successfully. Please check "
+ "log for errors.")
+
+ def _load_build_resources(self):
+ p = self.config.get('build_resources_path') % self.query_abs_dirs()
+ if not os.path.exists(p):
+ self.info('%s does not exist; not loading build resources' % p)
+ return None
+
+ with open(p, 'rb') as fh:
+ resources = json.load(fh)
+
+ if 'duration' not in resources:
+ self.info('resource usage lacks duration; ignoring')
+ return None
+
+ data = {
+ 'name': 'build times',
+ 'value': resources['duration'],
+ 'extraOptions': self.perfherder_resource_options(),
+ 'subtests': [],
+ }
+
+ for phase in resources['phases']:
+ if 'duration' not in phase:
+ continue
+ data['subtests'].append({
+ 'name': phase['name'],
+ 'value': phase['duration'],
+ })
+
+ return data
+
+ def generate_build_stats(self):
+ """grab build stats following a compile.
+
+        This action handles all statistics from a build ('count_ctors')
+        and then posts the results to the graph server.
+        We only post to the graph server for non-nightly builds.
+ """
+ if self.config.get('forced_artifact_build'):
+ self.info('Skipping due to forced artifact build.')
+ return
+
+ import tarfile
+ import zipfile
+ c = self.config
+
+ if c.get('enable_count_ctors'):
+ self.info("counting ctors...")
+ self._count_ctors()
+ else:
+ self.info("ctors counts are disabled for this build.")
+
+ # Report some important file sizes for display in treeherder
+
+ dirs = self.query_abs_dirs()
+ packageName = self.query_buildbot_property('packageFilename')
+
+ # if packageName is not set because we are not running in Buildbot,
+ # then assume we are using MOZ_SIMPLE_PACKAGE_NAME, which means the
+ # package is named one of target.{tar.bz2,zip,dmg}.
+ if not packageName:
+ dist_dir = os.path.join(dirs['abs_obj_dir'], 'dist')
+ for ext in ['apk', 'dmg', 'tar.bz2', 'zip']:
+ name = 'target.' + ext
+ if os.path.exists(os.path.join(dist_dir, name)):
+ packageName = name
+ break
+ else:
+ self.fatal("could not determine packageName")
+
+ interests = ['libxul.so', 'classes.dex', 'omni.ja']
+ installer = os.path.join(dirs['abs_obj_dir'], 'dist', packageName)
+ installer_size = 0
+ size_measurements = []
+
+ if os.path.exists(installer):
+ installer_size = self.query_filesize(installer)
+ self.info('TinderboxPrint: Size of %s<br/>%s bytes\n' % (
+ packageName, installer_size))
+ try:
+ subtests = {}
+ if zipfile.is_zipfile(installer):
+ with zipfile.ZipFile(installer, 'r') as zf:
+ for zi in zf.infolist():
+ name = os.path.basename(zi.filename)
+ size = zi.file_size
+ if name in interests:
+ if name in subtests:
+ # File seen twice in same archive;
+ # ignore to avoid confusion.
+ subtests[name] = None
+ else:
+ subtests[name] = size
+ elif tarfile.is_tarfile(installer):
+ with tarfile.open(installer, 'r:*') as tf:
+ for ti in tf:
+ name = os.path.basename(ti.name)
+ size = ti.size
+ if name in interests:
+ if name in subtests:
+ # File seen twice in same archive;
+ # ignore to avoid confusion.
+ subtests[name] = None
+ else:
+ subtests[name] = size
+ for name in subtests:
+ if subtests[name] is not None:
+ self.info('TinderboxPrint: Size of %s<br/>%s bytes\n' % (
+ name, subtests[name]))
+ size_measurements.append({'name': name, 'value': subtests[name]})
+            except Exception:
+ self.info('Unable to search %s for component sizes.' % installer)
+ size_measurements = []
+
+ perfherder_data = {
+ "framework": {
+ "name": "build_metrics"
+ },
+ "suites": [],
+ }
+ if installer_size or size_measurements:
+ perfherder_data["suites"].append({
+ "name": "installer size",
+ "value": installer_size,
+ "alertThreshold": 0.25,
+ "subtests": size_measurements
+ })
+
+ build_metrics = self._load_build_resources()
+ if build_metrics:
+ perfherder_data['suites'].append(build_metrics)
+
+ if perfherder_data["suites"]:
+ self.info('PERFHERDER_DATA: %s' % json.dumps(perfherder_data))
+
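As a rough illustration, a PERFHERDER_DATA line with both suites populated could look like the following (all numbers invented):

    import json

    perfherder_data = {
        "framework": {"name": "build_metrics"},
        "suites": [
            {
                "name": "installer size",
                "value": 52345678,  # hypothetical installer byte count
                "alertThreshold": 0.25,
                "subtests": [{"name": "omni.ja", "value": 9876543}],
            },
            {
                "name": "build times",
                "value": 1820.5,  # hypothetical total duration in seconds
                "extraOptions": [],
                "subtests": [{"name": "configure", "value": 120.3}],
            },
        ],
    }
    print('PERFHERDER_DATA: %s' % json.dumps(perfherder_data))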
+ def sendchange(self):
+ if os.environ.get('TASK_ID'):
+ self.info("We are not running this in buildbot; skipping")
+ return
+
+ if self.config.get('enable_talos_sendchange'):
+ self._do_sendchange('talos')
+ else:
+ self.info("'enable_talos_sendchange' is false; skipping")
+
+ if self.config.get('enable_unittest_sendchange'):
+ self._do_sendchange('unittest')
+ else:
+ self.info("'enable_unittest_sendchange' is false; skipping")
+
+ def _do_sendchange(self, test_type):
+ c = self.config
+
+ # grab any props available from this or previous unclobbered runs
+ self.generate_build_props(console_output=False,
+ halt_on_failure=False)
+
+ installer_url = self.query_buildbot_property('packageUrl')
+ if not installer_url:
+ # don't burn the job but we should turn orange
+ self.error("could not determine packageUrl property to use "
+ "against sendchange. Was it set after 'mach build'?")
+            self.return_code = self.worst_level(
+                1, self.return_code, AUTOMATION_EXIT_CODES[::-1]
+            )
+ return
+ tests_url = self.query_buildbot_property('testsUrl')
+ # Contains the url to a manifest describing the test packages required
+ # for each unittest harness.
+ # For the moment this property is only set on desktop builds. Android
+ # builds find the packages manifest based on the upload
+ # directory of the installer.
+ test_packages_url = self.query_buildbot_property('testPackagesUrl')
+ pgo_build = c.get('pgo_build', False) or self._compile_against_pgo()
+
+ # these cmds are sent to mach through env vars. We won't know the
+ # packageUrl or testsUrl until mach runs upload target so we let mach
+ # fill in the rest of the cmd
+ sendchange_props = {
+ 'buildid': self.query_buildid(),
+ 'builduid': self.query_builduid(),
+ 'pgo_build': pgo_build,
+ }
+ if self.query_is_nightly():
+ sendchange_props['nightly_build'] = True
+ if test_type == 'talos':
+ if pgo_build:
+ build_type = 'pgo-'
+ else: # we don't do talos sendchange for debug so no need to check
+ build_type = '' # leave 'opt' out of branch for talos
+ talos_branch = "%s-%s-%s%s" % (self.branch,
+ self.stage_platform,
+ build_type,
+ 'talos')
+ self.invoke_sendchange(downloadables=[installer_url],
+ branch=talos_branch,
+ username='sendchange',
+ sendchange_props=sendchange_props)
+ elif test_type == 'unittest':
+ # do unittest sendchange
+ if c.get('debug_build'):
+ build_type = '' # for debug builds we append nothing
+ elif pgo_build:
+ build_type = '-pgo'
+ else: # generic opt build
+ build_type = '-opt'
+
+ if c.get('unittest_platform'):
+ platform = c['unittest_platform']
+ else:
+ platform = self.stage_platform
+
+ platform_and_build_type = "%s%s" % (platform, build_type)
+ unittest_branch = "%s-%s-%s" % (self.branch,
+ platform_and_build_type,
+ 'unittest')
+
+ downloadables = [installer_url]
+ if test_packages_url:
+ downloadables.append(test_packages_url)
+ else:
+ downloadables.append(tests_url)
+
+ self.invoke_sendchange(downloadables=downloadables,
+ branch=unittest_branch,
+ sendchange_props=sendchange_props)
+ else:
+ self.fatal('type: "%s" is unknown for sendchange type. valid '
+ 'strings are "unittest" or "talos"' % test_type)
+
+ def update(self):
+ """ submit balrog update steps. """
+ if self.config.get('forced_artifact_build'):
+ self.info('Skipping due to forced artifact build.')
+ return
+ if not self.query_is_nightly():
+ self.info("Not a nightly build, skipping balrog submission.")
+ return
+
+ # grab any props available from this or previous unclobbered runs
+ self.generate_build_props(console_output=False,
+ halt_on_failure=False)
+
+ # generate balrog props as artifacts
+ if self.config.get('taskcluster_nightly'):
+ env = self.query_mach_build_env(multiLocale=False)
+ props_path = os.path.join(env["UPLOAD_PATH"],
+ 'balrog_props.json')
+ self.generate_balrog_props(props_path)
+ return
+
+        if not self.config.get("balrog_servers"):
+            self.fatal("balrog_servers not set; cannot submit balrog updates.")
+
+ if self.submit_balrog_updates():
+ # set the build to orange so it is at least caught
+ self.return_code = self.worst_level(
+ EXIT_STATUS_DICT[TBPL_WARNING], self.return_code,
+ AUTOMATION_EXIT_CODES[::-1]
+ )
+
+ def valgrind_test(self):
+ '''Execute mach's valgrind-test for memory leaks'''
+ env = self.query_build_env()
+ env.update(self.query_mach_build_env())
+
+ python = self.query_exe('python2.7')
+ return_code = self.run_command_m(
+ command=[python, 'mach', 'valgrind-test'],
+ cwd=self.query_abs_dirs()['abs_src_dir'],
+ env=env, output_timeout=self.config.get('max_build_output_timeout', 60 * 40)
+ )
+ if return_code:
+ self.return_code = self.worst_level(
+ EXIT_STATUS_DICT[TBPL_FAILURE], self.return_code,
+ AUTOMATION_EXIT_CODES[::-1]
+ )
+ self.fatal("'mach valgrind-test' did not run successfully. Please check "
+ "log for errors.")
+
+ def _post_fatal(self, message=None, exit_code=None):
+ if not self.return_code: # only overwrite return_code if it's 0
+ self.error('setting return code to 2 because fatal was called')
+ self.return_code = 2
+
+ @PostScriptRun
+ def _summarize(self):
+ """ If this is run in automation, ensure the return code is valid and
+ set it to one if it's not. Finally, log any summaries we collected
+ from the script run.
+ """
+ if self.config.get("is_automation"):
+ # let's ignore all mention of buildbot/tbpl status until this
+ # point so it will be easier to manage
+ if self.return_code not in AUTOMATION_EXIT_CODES:
+ self.error("Return code is set to: %s and is outside of "
+ "automation's known values. Setting to 2(failure). "
+ "Valid return codes %s" % (self.return_code,
+ AUTOMATION_EXIT_CODES))
+ self.return_code = 2
+ for status, return_code in EXIT_STATUS_DICT.iteritems():
+ if return_code == self.return_code:
+ self.buildbot_status(status, TBPL_STATUS_DICT[status])
+ self.summary()
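_summarize() maps the final numeric return code back to a status name by scanning EXIT_STATUS_DICT; a standalone sketch of that reverse lookup (the dict values here are assumed for illustration, not the real table):

    EXIT_STATUS_DICT = {'success': 0, 'warning': 1, 'failure': 2}  # assumed values

    def status_for(return_code):
        # Linear reverse lookup: find the status whose exit code matches.
        for status, code in EXIT_STATUS_DICT.items():
            if code == return_code:
                return status
        return None

    print(status_for(1))  # -> warning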
diff --git a/testing/mozharness/mozharness/mozilla/building/hazards.py b/testing/mozharness/mozharness/mozilla/building/hazards.py
new file mode 100644
index 000000000..6de235f89
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/building/hazards.py
@@ -0,0 +1,241 @@
+import os
+import json
+import re
+
+from mozharness.base.errors import MakefileErrorList
+from mozharness.mozilla.buildbot import TBPL_WARNING
+
+
+class HazardError(Exception):
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return repr(self.value)
+
+ # Logging ends up calling splitlines directly on what is being logged, which would fail.
+ def splitlines(self):
+ return str(self).splitlines()
+
+class HazardAnalysis(object):
+ def clobber_shell(self, builder):
+ """Clobber the specially-built JS shell used to run the analysis"""
+ dirs = builder.query_abs_dirs()
+ builder.rmtree(dirs['shell_objdir'])
+
+ def configure_shell(self, builder):
+ """Configure the specially-built JS shell used to run the analysis"""
+ dirs = builder.query_abs_dirs()
+
+ if not os.path.exists(dirs['shell_objdir']):
+ builder.mkdir_p(dirs['shell_objdir'])
+
+ js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
+ rc = builder.run_command(['autoconf-2.13'],
+ cwd=js_src_dir,
+ env=builder.env,
+ error_list=MakefileErrorList)
+ if rc != 0:
+ rc = builder.run_command(['autoconf2.13'],
+ cwd=js_src_dir,
+ env=builder.env,
+ error_list=MakefileErrorList)
+ if rc != 0:
+ raise HazardError("autoconf failed, can't continue.")
+
+ rc = builder.run_command([os.path.join(js_src_dir, 'configure'),
+ '--enable-optimize',
+ '--disable-debug',
+ '--enable-ctypes',
+ '--with-system-nspr',
+ '--without-intl-api'],
+ cwd=dirs['shell_objdir'],
+ env=builder.env,
+ error_list=MakefileErrorList)
+ if rc != 0:
+ raise HazardError("Configure failed, can't continue.")
+
+ def build_shell(self, builder):
+ """Build a JS shell specifically for running the analysis"""
+ dirs = builder.query_abs_dirs()
+
+ rc = builder.run_command(['make', '-j', str(builder.config.get('concurrency', 4)), '-s'],
+ cwd=dirs['shell_objdir'],
+ env=builder.env,
+ error_list=MakefileErrorList)
+ if rc != 0:
+ raise HazardError("Build failed, can't continue.")
+
+ def clobber(self, builder):
+ """Clobber all of the old analysis data. Note that theoretically we could do
+ incremental analyses, but they seem to still be buggy."""
+ dirs = builder.query_abs_dirs()
+ builder.rmtree(dirs['abs_analysis_dir'])
+ builder.rmtree(dirs['abs_analyzed_objdir'])
+
+ def setup(self, builder):
+ """Prepare the config files and scripts for running the analysis"""
+ dirs = builder.query_abs_dirs()
+ analysis_dir = dirs['abs_analysis_dir']
+
+ if not os.path.exists(analysis_dir):
+ builder.mkdir_p(analysis_dir)
+
+ js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
+
+ values = {
+ 'js': os.path.join(dirs['shell_objdir'], 'dist', 'bin', 'js'),
+ 'analysis_scriptdir': os.path.join(js_src_dir, 'devtools', 'rootAnalysis'),
+ 'source_objdir': dirs['abs_analyzed_objdir'],
+ 'source': os.path.join(dirs['abs_work_dir'], 'source'),
+ 'sixgill': os.path.join(dirs['abs_work_dir'], builder.config['sixgill']),
+ 'sixgill_bin': os.path.join(dirs['abs_work_dir'], builder.config['sixgill_bin']),
+ 'gcc_bin': os.path.join(dirs['abs_work_dir'], 'gcc'),
+ }
+ defaults = """
+js = '%(js)s'
+analysis_scriptdir = '%(analysis_scriptdir)s'
+objdir = '%(source_objdir)s'
+source = '%(source)s'
+sixgill = '%(sixgill)s'
+sixgill_bin = '%(sixgill_bin)s'
+gcc_bin = '%(gcc_bin)s'
+jobs = 4
+""" % values
+
+ defaults_path = os.path.join(analysis_dir, 'defaults.py')
+        with open(defaults_path, "w") as fh:
+            fh.write(defaults)
+ builder.log("Wrote analysis config file " + defaults_path)
+
+ build_script = builder.config['build_command']
+ builder.copyfile(os.path.join(dirs['mozharness_scriptdir'],
+ os.path.join('spidermonkey', build_script)),
+ os.path.join(analysis_dir, build_script),
+ copystat=True)
+
+ def run(self, builder, env, error_list):
+ """Execute the analysis, which consists of building all analyzed
+ source code with a GCC plugin active that siphons off the interesting
+ data, then running some JS scripts over the databases created by
+ the plugin."""
+ dirs = builder.query_abs_dirs()
+ analysis_dir = dirs['abs_analysis_dir']
+ analysis_scriptdir = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'])
+
+ build_script = builder.config['build_command']
+ build_script = os.path.abspath(os.path.join(analysis_dir, build_script))
+
+ cmd = [
+ builder.config['python'],
+ os.path.join(analysis_scriptdir, 'analyze.py'),
+ "--source", dirs['gecko_src'],
+ "--buildcommand", build_script,
+ ]
+ retval = builder.run_command(cmd,
+ cwd=analysis_dir,
+ env=env,
+ error_list=error_list)
+ if retval != 0:
+ raise HazardError("failed to build")
+
+ def collect_output(self, builder):
+ """Gather up the analysis output and place in the upload dir."""
+ dirs = builder.query_abs_dirs()
+ analysis_dir = dirs['abs_analysis_dir']
+ upload_dir = dirs['abs_blob_upload_dir']
+ builder.mkdir_p(upload_dir)
+ files = (('rootingHazards.txt',
+ 'rooting_hazards',
+ 'list of rooting hazards, unsafe references, and extra roots'),
+ ('gcFunctions.txt',
+ 'gcFunctions',
+ 'list of functions that can gc, and why'),
+ ('allFunctions.txt',
+ 'allFunctions',
+ 'list of all functions that were compiled'),
+ ('gcTypes.txt',
+ 'gcTypes',
+ 'list of types containing unrooted gc pointers'),
+ ('unnecessary.txt',
+ 'extra',
+ 'list of extra roots (rooting with no GC function in scope)'),
+ ('refs.txt',
+ 'refs',
+ 'list of unsafe references to unrooted pointers'),
+ ('hazards.txt',
+ 'hazards',
+ 'list of just the hazards, together with gcFunction reason for each'))
+        for f, short, long_desc in files:
+            builder.copy_to_upload_dir(os.path.join(analysis_dir, f),
+                                       short_desc=short,
+                                       long_desc=long_desc,
+                                       compress=False,  # blobber will compress
+                                       upload_dir=upload_dir)
+ print("== Hazards (temporarily inline here, beware weirdly interleaved output, see bug 1211402) ==")
+        with open(os.path.join(analysis_dir, "hazards.txt")) as hazards_fh:
+            print(hazards_fh.read())
+
+ def upload_results(self, builder):
+ """Upload the results of the analysis."""
+ pass
+
+ def check_expectations(self, builder):
+ """Compare the actual to expected number of problems."""
+ if 'expect_file' not in builder.config:
+ builder.info('No expect_file given; skipping comparison with expected hazard count')
+ return
+
+ dirs = builder.query_abs_dirs()
+ analysis_dir = dirs['abs_analysis_dir']
+ analysis_scriptdir = os.path.join(dirs['gecko_src'], 'js', 'src', 'devtools', 'rootAnalysis')
+ expect_file = os.path.join(analysis_scriptdir, builder.config['expect_file'])
+ expect = builder.read_from_file(expect_file)
+ if expect is None:
+ raise HazardError("could not load expectation file")
+ data = json.loads(expect)
+
+ num_hazards = 0
+ num_refs = 0
+ with builder.opened(os.path.join(analysis_dir, "rootingHazards.txt")) as (hazards_fh, err):
+ if err:
+ raise HazardError("hazards file required")
+ for line in hazards_fh:
+ m = re.match(r"^Function.*has unrooted.*live across GC call", line)
+ if m:
+ num_hazards += 1
+
+ m = re.match(r'^Function.*takes unsafe address of unrooted', line)
+ if m:
+ num_refs += 1
+
+ expect_hazards = data.get('expect-hazards')
+ status = []
+ if expect_hazards is None:
+ status.append("%d hazards" % num_hazards)
+ else:
+ status.append("%d/%d hazards allowed" % (num_hazards, expect_hazards))
+
+ if expect_hazards is not None and expect_hazards != num_hazards:
+ if expect_hazards < num_hazards:
+ builder.warning("TEST-UNEXPECTED-FAIL %d more hazards than expected (expected %d, saw %d)" %
+ (num_hazards - expect_hazards, expect_hazards, num_hazards))
+ builder.buildbot_status(TBPL_WARNING)
+ else:
+ builder.info("%d fewer hazards than expected! (expected %d, saw %d)" %
+ (expect_hazards - num_hazards, expect_hazards, num_hazards))
+
+ expect_refs = data.get('expect-refs')
+ if expect_refs is None:
+ status.append("%d unsafe refs" % num_refs)
+ else:
+ status.append("%d/%d unsafe refs allowed" % (num_refs, expect_refs))
+
+ if expect_refs is not None and expect_refs != num_refs:
+ if expect_refs < num_refs:
+ builder.warning("TEST-UNEXPECTED-FAIL %d more unsafe refs than expected (expected %d, saw %d)" %
+ (num_refs - expect_refs, expect_refs, num_refs))
+ builder.buildbot_status(TBPL_WARNING)
+ else:
+ builder.info("%d fewer unsafe refs than expected! (expected %d, saw %d)" %
+ (expect_refs - num_refs, expect_refs, num_refs))
+
+ builder.info("TinderboxPrint: " + ", ".join(status))
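check_expectations() compares the parsed hazard and ref counts against a JSON expectation file whose keys are 'expect-hazards' and 'expect-refs'. A hypothetical expect file and the comparison it drives:

    import json

    # invented expectation file contents
    data = json.loads('{"expect-hazards": 0, "expect-refs": 2}')
    num_hazards, num_refs = 1, 2  # counts scraped from rootingHazards.txt
    if data['expect-hazards'] < num_hazards:
        print("TEST-UNEXPECTED-FAIL 1 more hazards than expected (expected 0, saw 1)")
    if data['expect-refs'] == num_refs:
        print("2/2 unsafe refs allowed")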
diff --git a/testing/mozharness/mozharness/mozilla/checksums.py b/testing/mozharness/mozharness/mozilla/checksums.py
new file mode 100644
index 000000000..6b8997375
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/checksums.py
@@ -0,0 +1,21 @@
+def parse_checksums_file(checksums):
+ """Parses checksums files that the build system generates and uploads:
+ https://hg.mozilla.org/mozilla-central/file/default/build/checksums.py"""
+ fileInfo = {}
+ for line in checksums.splitlines():
+ hash_, type_, size, file_ = line.split(None, 3)
+ size = int(size)
+ if size < 0:
+ raise ValueError("Found negative value (%d) for size." % size)
+ if file_ not in fileInfo:
+ fileInfo[file_] = {"hashes": {}}
+ # If the file already exists, make sure that the size matches the
+ # previous entry.
+ elif fileInfo[file_]['size'] != size:
+ raise ValueError("Found different sizes for same file %s (%s and %s)" % (file_, fileInfo[file_]['size'], size))
+ # Same goes for the hash.
+ elif type_ in fileInfo[file_]['hashes'] and fileInfo[file_]['hashes'][type_] != hash_:
+ raise ValueError("Found different %s hashes for same file %s (%s and %s)" % (type_, file_, fileInfo[file_]['hashes'][type_], hash_))
+ fileInfo[file_]['size'] = size
+ fileInfo[file_]['hashes'][type_] = hash_
+ return fileInfo
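Each input line is expected to be 'hash type size filename', split at most three times so filenames may contain spaces. A usage sketch with invented entries:

    from mozharness.mozilla.checksums import parse_checksums_file

    checksums = "\n".join([
        "d1e2a3 sha512 1024 firefox.tar.bz2",  # hypothetical lines
        "b4c5d6 md5 1024 firefox.tar.bz2",
    ])
    info = parse_checksums_file(checksums)
    print(info['firefox.tar.bz2']['size'])            # -> 1024
    print(sorted(info['firefox.tar.bz2']['hashes']))  # -> ['md5', 'sha512']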
diff --git a/testing/mozharness/mozharness/mozilla/l10n/__init__.py b/testing/mozharness/mozharness/mozilla/l10n/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/l10n/__init__.py
diff --git a/testing/mozharness/mozharness/mozilla/l10n/locales.py b/testing/mozharness/mozharness/mozilla/l10n/locales.py
new file mode 100755
index 000000000..24920ae44
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/l10n/locales.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Localization.
+"""
+
+import os
+from urlparse import urljoin
+import sys
+from copy import deepcopy
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.config import parse_config_file
+from mozharness.base.errors import PythonErrorList
+from mozharness.base.parallel import ChunkingMixin
+
+
+# LocalesMixin {{{1
+class LocalesMixin(ChunkingMixin):
+ def __init__(self, **kwargs):
+        """ Mixins generally don't have an __init__, since that breaks
+        super().__init__() for children. However, one is needed here to
+        initialize the state used by the query_abs_dirs() override.
+        """
+ self.abs_dirs = None
+ self.locales = None
+ self.gecko_locale_revisions = None
+ self.l10n_revisions = {}
+
+ def query_locales(self):
+ if self.locales is not None:
+ return self.locales
+ c = self.config
+ ignore_locales = c.get("ignore_locales", [])
+ additional_locales = c.get("additional_locales", [])
+ # List of locales can be set by using different methods in the
+ # following order:
+ # 1. "locales" buildbot property: a string of locale:revision separated
+ # by space
+ # 2. "MOZ_LOCALES" env variable: a string of locale:revision separated
+ # by space
+ # 3. self.config["locales"] which can be either coming from the config
+ # or from --locale command line argument
+ # 4. using self.config["locales_file"] l10n changesets file
+ locales = None
+
+ # Buildbot property
+ if hasattr(self, 'read_buildbot_config'):
+ self.read_buildbot_config()
+ if self.buildbot_config:
+ locales = self.buildbot_config['properties'].get("locales")
+ if locales:
+ self.info("Using locales from buildbot: %s" % locales)
+ locales = locales.split()
+ else:
+ self.info("'read_buildbot_config()' is missing, ignoring buildbot"
+ " properties")
+
+ # Environment variable
+ if not locales and "MOZ_LOCALES" in os.environ:
+ self.debug("Using locales from environment: %s" %
+ os.environ["MOZ_LOCALES"])
+ locales = os.environ["MOZ_LOCALES"].split()
+
+ # Command line or config
+ if not locales and c.get("locales", None):
+ locales = c["locales"]
+ self.debug("Using locales from config/CLI: %s" % locales)
+
+ # parse locale:revision if set
+ if locales:
+ for l in locales:
+ if ":" in l:
+ # revision specified in locale string
+ locale, revision = l.split(":", 1)
+ self.debug("Using %s:%s" % (locale, revision))
+ self.l10n_revisions[locale] = revision
+ # clean up locale by removing revisions
+ locales = [l.split(":")[0] for l in locales]
+
+ if not locales and 'locales_file' in c:
+ locales_file = os.path.join(c['base_work_dir'], c['work_dir'],
+ c['locales_file'])
+ locales = self.parse_locales_file(locales_file)
+
+ if not locales:
+ self.fatal("No locales set!")
+
+ for locale in ignore_locales:
+ if locale in locales:
+ self.debug("Ignoring locale %s." % locale)
+ locales.remove(locale)
+ if locale in self.l10n_revisions:
+ del self.l10n_revisions[locale]
+
+ for locale in additional_locales:
+ if locale not in locales:
+ self.debug("Adding locale %s." % locale)
+ locales.append(locale)
+
+ if not locales:
+ return None
+        if 'total_locale_chunks' in c and 'this_locale_chunk' in c:
+ self.debug("Pre-chunking locale list: %s" % str(locales))
+ locales = self.query_chunked_list(locales,
+ c['this_locale_chunk'],
+ c['total_locale_chunks'],
+ sort=True)
+ self.debug("Post-chunking locale list: %s" % locales)
+ self.locales = locales
+ return self.locales
+
+ def list_locales(self):
+ """ Stub action method.
+ """
+ self.info("Locale list: %s" % str(self.query_locales()))
+
+ def parse_locales_file(self, locales_file):
+ locales = []
+ c = self.config
+ platform = c.get("locales_platform", None)
+
+ if locales_file.endswith('json'):
+ locales_json = parse_config_file(locales_file)
+ for locale in locales_json.keys():
+ if isinstance(locales_json[locale], dict):
+ if platform and platform not in locales_json[locale]['platforms']:
+ continue
+ self.l10n_revisions[locale] = locales_json[locale]['revision']
+ else:
+ # some other way of getting this?
+ self.l10n_revisions[locale] = 'default'
+ locales.append(locale)
+ else:
+ locales = self.read_from_file(locales_file).split()
+ return locales
+
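In the JSON form, parse_locales_file() expects each locale to map either to a plain value or to a dict carrying 'revision' and 'platforms'; a hypothetical changesets file:

    {
        "de":    {"revision": "abc123def456", "platforms": ["android", "android-api-15"]},
        "en-GB": {"revision": "default", "platforms": ["android"]}
    }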
+ def run_compare_locales(self, locale, halt_on_failure=False):
+ dirs = self.query_abs_dirs()
+ env = self.query_l10n_env()
+ python = self.query_exe('python2.7')
+ compare_locales_error_list = list(PythonErrorList)
+ self.rmtree(dirs['abs_merge_dir'])
+ self.mkdir_p(dirs['abs_merge_dir'])
+ command = [python, 'mach', 'compare-locales',
+ '--merge-dir', dirs['abs_merge_dir'],
+ '--l10n-ini', os.path.join(dirs['abs_locales_src_dir'], 'l10n.ini'),
+ '--l10n-base', dirs['abs_l10n_dir'], locale]
+ self.info("*** BEGIN compare-locales %s" % locale)
+ status = self.run_command(command,
+ halt_on_failure=halt_on_failure,
+ env=env,
+ cwd=dirs['abs_mozilla_dir'],
+ error_list=compare_locales_error_list)
+ self.info("*** END compare-locales %s" % locale)
+ return status
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(LocalesMixin, self).query_abs_dirs()
+ c = self.config
+ dirs = {}
+ dirs['abs_work_dir'] = os.path.join(c['base_work_dir'],
+ c['work_dir'])
+ # TODO prettify this up later
+ if 'l10n_dir' in c:
+ dirs['abs_l10n_dir'] = os.path.join(dirs['abs_work_dir'],
+ c['l10n_dir'])
+ if 'mozilla_dir' in c:
+ dirs['abs_mozilla_dir'] = os.path.join(dirs['abs_work_dir'],
+ c['mozilla_dir'])
+ dirs['abs_locales_src_dir'] = os.path.join(dirs['abs_mozilla_dir'],
+ c['locales_dir'])
+ dirs['abs_compare_locales_dir'] = os.path.join(dirs['abs_mozilla_dir'],
+ 'python', 'compare-locales',
+ 'compare_locales')
+ else:
+ # Use old-compare-locales if no mozilla_dir set, needed
+ # for clobberer, and existing mozharness tests.
+ dirs['abs_compare_locales_dir'] = os.path.join(dirs['abs_work_dir'],
+ 'compare-locales')
+
+ if 'objdir' in c:
+ if os.path.isabs(c['objdir']):
+ dirs['abs_objdir'] = c['objdir']
+ else:
+ dirs['abs_objdir'] = os.path.join(dirs['abs_mozilla_dir'],
+ c['objdir'])
+ dirs['abs_merge_dir'] = os.path.join(dirs['abs_objdir'],
+ 'merged')
+ dirs['abs_locales_dir'] = os.path.join(dirs['abs_objdir'],
+ c['locales_dir'])
+
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ # This requires self to inherit a VCSMixin.
+ def pull_locale_source(self, hg_l10n_base=None, parent_dir=None, vcs='hg'):
+ c = self.config
+ if not hg_l10n_base:
+ hg_l10n_base = c['hg_l10n_base']
+ if parent_dir is None:
+ parent_dir = self.query_abs_dirs()['abs_l10n_dir']
+ self.mkdir_p(parent_dir)
+ repos = []
+ replace_dict = {}
+ # This block is to allow for pulling buildbot-configs in Fennec
+ # release builds, since we don't pull it in MBF anymore.
+ if c.get("l10n_repos"):
+ if c.get("user_repo_override"):
+ replace_dict['user_repo_override'] = c['user_repo_override']
+ for repo_dict in deepcopy(c['l10n_repos']):
+ repo_dict['repo'] = repo_dict['repo'] % replace_dict
+ repos.append(repo_dict)
+ else:
+ repos = c.get("l10n_repos")
+ self.vcs_checkout_repos(repos, tag_override=c.get('tag_override'))
+ # Pull locales
+ locales = self.query_locales()
+ locale_repos = []
+ if c.get("user_repo_override"):
+ hg_l10n_base = hg_l10n_base % {"user_repo_override": c["user_repo_override"]}
+ for locale in locales:
+ tag = c.get('hg_l10n_tag', 'default')
+ if self.l10n_revisions.get(locale):
+ tag = self.l10n_revisions[locale]
+ locale_repos.append({
+ 'repo': "%s/%s" % (hg_l10n_base, locale),
+ 'branch': tag,
+ 'vcs': vcs
+ })
+ revs = self.vcs_checkout_repos(repo_list=locale_repos,
+ parent_dir=parent_dir,
+ tag_override=c.get('tag_override'))
+ self.gecko_locale_revisions = revs
+
+ def query_l10n_repo(self):
+ # Find the name of our repository
+ mozilla_dir = self.config['mozilla_dir']
+ repo = None
+ for repository in self.config['repos']:
+ if repository.get('dest') == mozilla_dir:
+ repo = repository['repo']
+ break
+ return repo
+
+# GaiaLocalesMixin {{{1
+class GaiaLocalesMixin(object):
+ gaia_locale_revisions = None
+
+ def pull_gaia_locale_source(self, l10n_config, locales, base_dir):
+ root = l10n_config['root']
+ # urljoin will strip the last part of root if it doesn't end with "/"
+ if not root.endswith('/'):
+ root = root + '/'
+ vcs = l10n_config['vcs']
+ env = l10n_config.get('env', {})
+ repos = []
+ for locale in locales:
+ repos.append({
+ 'repo': urljoin(root, locale),
+ 'dest': locale,
+ 'vcs': vcs,
+ 'env': env,
+ })
+ self.gaia_locale_revisions = self.vcs_checkout_repos(repo_list=repos, parent_dir=base_dir)
+
+
+# __main__ {{{1
+
+if __name__ == '__main__':
+ pass
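For reference, the locale:revision parsing in query_locales() behaves as in this standalone sketch (the locale list is invented):

    locales = "en-GB:GECKO_BASE_20170112 de:default fr".split()
    l10n_revisions = {}
    for l in locales:
        if ":" in l:
            # revision pinned in the locale string
            locale, revision = l.split(":", 1)
            l10n_revisions[locale] = revision
    locales = [l.split(":")[0] for l in locales]
    print(locales)         # -> ['en-GB', 'de', 'fr']
    print(l10n_revisions)  # -> {'en-GB': 'GECKO_BASE_20170112', 'de': 'default'}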
diff --git a/testing/mozharness/mozharness/mozilla/l10n/multi_locale_build.py b/testing/mozharness/mozharness/mozilla/l10n/multi_locale_build.py
new file mode 100755
index 000000000..5bdbc8011
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/l10n/multi_locale_build.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""multi_locale_build.py
+
+This should be a mostly generic multilocale build script.
+"""
+
+from copy import deepcopy
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.errors import MakefileErrorList, SSHErrorList
+from mozharness.base.log import FATAL
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.l10n.locales import LocalesMixin
+
+
+# MultiLocaleBuild {{{1
+class MultiLocaleBuild(LocalesMixin, MercurialScript):
+ """ This class targets Fennec multilocale builds.
+ We were considering this for potential Firefox desktop multilocale.
+ Now that we have a different approach for B2G multilocale,
+ it's most likely misnamed. """
+ config_options = [[
+ ["--locale"],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to repack"
+ }
+ ], [
+ ["--merge-locales"],
+ {"action": "store_true",
+ "dest": "merge_locales",
+ "default": False,
+ "help": "Use default [en-US] if there are missing strings"
+ }
+ ], [
+ ["--no-merge-locales"],
+ {"action": "store_false",
+ "dest": "merge_locales",
+ "help": "Do not allow missing strings"
+ }
+ ], [
+ ["--objdir"],
+ {"action": "store",
+ "dest": "objdir",
+ "type": "string",
+ "default": "objdir",
+ "help": "Specify the objdir"
+ }
+ ], [
+ ["--l10n-base"],
+ {"action": "store",
+ "dest": "hg_l10n_base",
+ "type": "string",
+ "help": "Specify the L10n repo base directory"
+ }
+ ], [
+ ["--l10n-tag"],
+ {"action": "store",
+ "dest": "hg_l10n_tag",
+ "type": "string",
+ "help": "Specify the L10n tag"
+ }
+ ], [
+ ["--tag-override"],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"
+ }
+ ], [
+ ["--user-repo-override"],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"
+ }
+ ], [
+ ["--l10n-dir"],
+ {"action": "store",
+ "dest": "l10n_dir",
+ "type": "string",
+ "default": "l10n",
+ "help": "Specify the l10n dir name"
+ }
+ ]]
+
+ def __init__(self, require_config_file=True):
+ LocalesMixin.__init__(self)
+ MercurialScript.__init__(self, config_options=self.config_options,
+ all_actions=['clobber', 'pull-build-source',
+ 'pull-locale-source',
+ 'build', 'package-en-US',
+ 'upload-en-US',
+ 'backup-objdir',
+ 'restore-objdir',
+ 'add-locales', 'package-multi',
+ 'upload-multi', 'summary'],
+ require_config_file=require_config_file)
+
+ def query_l10n_env(self):
+ return self.query_env()
+
+ def clobber(self):
+ c = self.config
+ if c['work_dir'] != '.':
+ path = os.path.join(c['base_work_dir'], c['work_dir'])
+ if os.path.exists(path):
+ self.rmtree(path, error_level=FATAL)
+ else:
+ self.info("work_dir is '.'; skipping for now.")
+
+ def pull_build_source(self):
+ c = self.config
+ repos = []
+ replace_dict = {}
+ # Replace %(user_repo_override)s with c['user_repo_override']
+ if c.get("user_repo_override"):
+ replace_dict['user_repo_override'] = c['user_repo_override']
+ for repo_dict in deepcopy(c['repos']):
+ repo_dict['repo'] = repo_dict['repo'] % replace_dict
+ repos.append(repo_dict)
+ else:
+ repos = c['repos']
+ self.vcs_checkout_repos(repos, tag_override=c.get('tag_override'))
+
+ # pull_locale_source() defined in LocalesMixin.
+
+ def build(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ self.copyfile(os.path.join(dirs['abs_work_dir'], c['mozconfig']),
+ os.path.join(dirs['abs_mozilla_dir'], 'mozconfig'),
+ error_level=FATAL)
+ command = "make -f client.mk build"
+ env = self.query_env()
+ if self._process_command(command=command,
+ cwd=dirs['abs_mozilla_dir'],
+ env=env, error_list=MakefileErrorList):
+ self.fatal("Erroring out after the build failed.")
+
+ def add_locales(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+
+ for locale in locales:
+ self.run_compare_locales(locale, halt_on_failure=True)
+ command = 'make chrome-%s L10NBASEDIR=%s' % (locale, dirs['abs_l10n_dir'])
+ if c['merge_locales']:
+ command += " LOCALE_MERGEDIR=%s" % dirs['abs_merge_dir'].replace(os.sep, '/')
+ status = self._process_command(command=command,
+ cwd=dirs['abs_locales_dir'],
+ error_list=MakefileErrorList)
+ if status:
+ self.return_code += 1
+ self.add_summary("Failed to add locale %s!" % locale,
+ level="error")
+ else:
+ self.add_summary("Added locale %s successfully." % locale)
+
+ def package_en_US(self):
+ self.package(package_type='en-US')
+
+ def preflight_package_multi(self):
+ dirs = self.query_abs_dirs()
+ self.run_command("rm -rfv dist/fennec*", cwd=dirs['abs_objdir'])
+ # bug 933290
+ self.run_command(["touch", "mobile/android/installer/Makefile"], cwd=dirs['abs_objdir'])
+
+ def package_multi(self):
+ self.package(package_type='multi')
+
+ def additional_packaging(self, package_type='en-US', env=None):
+ dirs = self.query_abs_dirs()
+ command = "make package-tests"
+ if package_type == 'multi':
+ command += " AB_CD=multi"
+ self.run_command(command, cwd=dirs['abs_objdir'], env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+ # TODO deal with buildsymbols
+
+ def package(self, package_type='en-US'):
+ dirs = self.query_abs_dirs()
+
+ command = "make package"
+ env = self.query_env()
+ if env is None:
+ # This is for Maemo, where we don't want an env for builds
+ # but we do for packaging. self.query_env() will return None.
+ env = os.environ.copy()
+ if package_type == 'multi':
+ command += " AB_CD=multi"
+ env['MOZ_CHROME_MULTILOCALE'] = "en-US " + \
+ ' '.join(self.query_locales())
+ self.info("MOZ_CHROME_MULTILOCALE is %s" % env['MOZ_CHROME_MULTILOCALE'])
+ self._process_command(command=command, cwd=dirs['abs_objdir'],
+ env=env, error_list=MakefileErrorList,
+ halt_on_failure=True)
+ self.additional_packaging(package_type=package_type, env=env)
+
+ def upload_en_US(self):
+ # TODO
+ self.info("Not written yet.")
+
+ def backup_objdir(self):
+ dirs = self.query_abs_dirs()
+ if not os.path.isdir(dirs['abs_objdir']):
+ self.warning("%s doesn't exist! Skipping..." % dirs['abs_objdir'])
+ return
+ rsync = self.query_exe('rsync')
+ backup_dir = '%s-bak' % dirs['abs_objdir']
+ self.rmtree(backup_dir)
+ self.mkdir_p(backup_dir)
+ self.run_command([rsync, '-a', '--delete', '--partial',
+ '%s/' % dirs['abs_objdir'],
+ '%s/' % backup_dir],
+ error_list=SSHErrorList)
+
+ def restore_objdir(self):
+ dirs = self.query_abs_dirs()
+ rsync = self.query_exe('rsync')
+ backup_dir = '%s-bak' % dirs['abs_objdir']
+ if not os.path.isdir(dirs['abs_objdir']) or not os.path.isdir(backup_dir):
+ self.warning("Both %s and %s need to exist to restore the objdir! Skipping..." % (dirs['abs_objdir'], backup_dir))
+ return
+ self.run_command([rsync, '-a', '--delete', '--partial',
+ '%s/' % backup_dir,
+ '%s/' % dirs['abs_objdir']],
+ error_list=SSHErrorList)
+
+ def upload_multi(self):
+ # TODO
+ self.info("Not written yet.")
+
+ def _process_command(self, **kwargs):
+        """Stub wrapper function that allows us to call scratchbox in
+        MaemoMultiLocaleBuild.
+        """
+ return self.run_command(**kwargs)
+
+# __main__ {{{1
+if __name__ == '__main__':
+ pass
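For reference, buildbase's multi_l10n() action invokes this script with a command of roughly the following shape (paths and config names are illustrative):

    python scripts/multil10n.py \
        --config-file multi_locale/mozilla-central_android.json \
        --config-file multi_locale/android-mozharness-build.json \
        --merge-locales --pull-locale-source --add-locales \
        --package-multi --summary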
diff --git a/testing/mozharness/mozharness/mozilla/mapper.py b/testing/mozharness/mozharness/mozilla/mapper.py
new file mode 100644
index 000000000..c5a2d4895
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/mapper.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Support for hg/git mapper
+"""
+import urllib2
+import time
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+
+class MapperMixin:
+ def query_mapper(self, mapper_url, project, vcs, rev,
+ require_answer=True, attempts=30, sleeptime=30,
+ project_name=None):
+ """
+ Returns the mapped revision for the target vcs via a mapper service
+
+ Args:
+ mapper_url (str): base url to use for the mapper service
+ project (str): The name of the mapper project to use for lookups
+ vcs (str): Which vcs you want the revision for. e.g. "git" to get
+ the git revision given an hg revision
+ rev (str): The original revision you want the mapping for.
+ require_answer (bool): Whether you require a valid answer or not.
+ If None is acceptable (meaning mapper doesn't know about the
+ revision you're asking about), then set this to False. If True,
+ then will return the revision, or cause a fatal error.
+ attempts (int): How many times to try to do the lookup
+ sleeptime (int): How long to sleep between attempts
+ project_name (str): Used for logging only to give a more
+ descriptive name to the project, otherwise just uses the
+ project parameter
+
+ Returns:
+ A revision string, or None
+ """
+ if project_name is None:
+ project_name = project
+ url = mapper_url.format(project=project, vcs=vcs, rev=rev)
+ self.info('Mapping %s revision to %s using %s' % (project_name, vcs, url))
+ n = 1
+ while n <= attempts:
+ try:
+ r = urllib2.urlopen(url, timeout=10)
+ j = json.loads(r.readline())
+ if j['%s_rev' % vcs] is None:
+ if require_answer:
+ raise Exception("Mapper returned a revision of None; maybe it needs more time.")
+ else:
+ self.warning("Mapper returned a revision of None. Accepting because require_answer is False.")
+ return j['%s_rev' % vcs]
+ except Exception, err:
+ self.warning('Error: %s' % str(err))
+ if n == attempts:
+ self.fatal('Giving up on %s %s revision for %s.' % (project_name, vcs, rev))
+ if sleeptime > 0:
+ self.info('Sleeping %i seconds before retrying' % sleeptime)
+ time.sleep(sleeptime)
+ continue
+ finally:
+ n += 1
+
+ def query_mapper_git_revision(self, url, project, rev, **kwargs):
+ """
+ Returns the git revision for the given hg revision `rev`
+ See query_mapper docs for supported parameters and docstrings
+ """
+ return self.query_mapper(url, project, "git", rev, **kwargs)
+
+ def query_mapper_hg_revision(self, url, project, rev, **kwargs):
+ """
+ Returns the hg revision for the given git revision `rev`
+ See query_mapper docs for supported parameters and docstrings
+ """
+ return self.query_mapper(url, project, "hg", rev, **kwargs)
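The mapper_url argument is a format string with {project}, {vcs} and {rev} placeholders; a sketch of how the lookup URL is assembled (the base URL and revision are hypothetical):

    # hypothetical mapper service endpoint
    mapper_url = "https://mapper.example.org/{project}/{vcs}/{rev}"
    print(mapper_url.format(project="gecko-dev", vcs="git", rev="0f3c4b1e"))
    # -> https://mapper.example.org/gecko-dev/git/0f3c4b1e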
diff --git a/testing/mozharness/mozharness/mozilla/mar.py b/testing/mozharness/mozharness/mozilla/mar.py
new file mode 100644
index 000000000..dbe3b96a0
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/mar.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""MarMixin, manages mar files"""
+
+import os
+import sys
+import ConfigParser
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+
+CONFIG = {
+ "buildid_section": 'App',
+ "buildid_option": "BuildID",
+}
+
+
+def query_ini_file(ini_file, section, option):
+ ini = ConfigParser.SafeConfigParser()
+ ini.read(ini_file)
+ return ini.get(section, option)
+
+
+def buildid_from_ini(ini_file):
+ """reads an ini_file and returns the buildid"""
+ return query_ini_file(ini_file,
+ CONFIG.get('buildid_section'),
+ CONFIG.get('buildid_option'))
+
+
+# MarMixin {{{1
+class MarMixin(object):
+ def _mar_tool_dir(self):
+        """returns the path of the mar tool directory"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs['abs_objdir'], config["local_mar_tool_dir"])
+
+ def _incremental_update_script(self):
+ """returns the path of incremental update script"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs['abs_mozilla_dir'],
+ config['incremental_update_script'])
+
+ def download_mar_tools(self):
+ """downloads mar tools executables (mar,mbsdiff)
+ and stores them local_dir()"""
+ self.info("getting mar tools")
+ dst_dir = self._mar_tool_dir()
+ self.mkdir_p(dst_dir)
+ config = self.config
+ replace_dict = {'platform': config['platform'],
+ 'branch': config['branch']}
+ url = config['mar_tools_url'] % replace_dict
+ binaries = (config['mar'], config['mbsdiff'])
+ for binary in binaries:
+ from_url = "/".join((url, binary))
+ full_path = os.path.join(dst_dir, binary)
+ if not os.path.exists(full_path):
+ self.download_file(from_url, file_name=full_path)
+ self.info("downloaded %s" % full_path)
+ else:
+ self.info("found %s, skipping download" % full_path)
+ self.chmod(full_path, 0755)
+
+ def _temp_mar_base_dir(self):
+ """a base dir for unpacking mars"""
+ dirs = self.query_abs_dirs()
+ return dirs['abs_objdir']
+
+ def _unpack_mar(self, mar_file, dst_dir):
+ """unpacks a mar file into dst_dir"""
+ cmd = ['perl', self._unpack_script(), mar_file]
+ env = self.query_bootstrap_env()
+ self.info("unpacking %s" % mar_file)
+ self.mkdir_p(dst_dir)
+ return self.run_command(cmd,
+ cwd=dst_dir,
+ env=env,
+ halt_on_failure=True)
+
+ def do_incremental_update(self, previous_dir, current_dir, partial_filename):
+ """create an incremental update from src_mar to dst_src.
+ It stores the result in partial_filename"""
+ # Usage: make_incremental_update.sh [OPTIONS] ARCHIVE FROMDIR TODIR
+ cmd = [self._incremental_update_script(), partial_filename,
+ previous_dir, current_dir]
+ env = self.query_bootstrap_env()
+ cwd = self._mar_dir('update_mar_dir')
+ self.mkdir_p(cwd)
+ result = self.run_command(cmd, cwd=cwd, env=env)
+ return result
+
+ def get_buildid_from_mar_dir(self, mar_unpack_dir):
+ """returns the buildid of the current mar file"""
+ config = self.config
+ ini_file = config['application_ini']
+ ini_file = os.path.join(mar_unpack_dir, ini_file)
+ self.info("application.ini file: %s" % ini_file)
+
+ # log the content of application.ini
+ with self.opened(ini_file, 'r') as (ini, error):
+ if error:
+ self.fatal('cannot open %s' % ini_file)
+ self.debug(ini.read())
+ return buildid_from_ini(ini_file)
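
A quick illustration of the ini helpers above; the path and BuildID value are
made up:

    # application.ini is a standard INI file shipped with the application:
    #   [App]
    #   BuildID=20170125094737
    from mozharness.mozilla.mar import buildid_from_ini

    buildid = buildid_from_ini("firefox/application.ini")  # -> "20170125094737"
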
diff --git a/testing/mozharness/mozharness/mozilla/mock.py b/testing/mozharness/mozharness/mozilla/mock.py
new file mode 100644
index 000000000..f8587c0d6
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/mock.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Code to integrate with mock
+"""
+
+import os.path
+import hashlib
+import subprocess
+import os
+
+ERROR_MSGS = {
+    'undetermined_buildroot_lock': 'buildroot_lock_path does not exist. '
+                                   'Nothing to remove.'
+}
+
+
+# MockMixin {{{1
+class MockMixin(object):
+ """Provides methods to setup and interact with mock environments.
+ https://wiki.mozilla.org/ReleaseEngineering/Applications/Mock
+
+ This is dependent on ScriptMixin
+ """
+ done_mock_setup = False
+ mock_enabled = False
+ default_mock_target = None
+
+ def init_mock(self, mock_target):
+ "Initialize mock environment defined by `mock_target`"
+ cmd = ['mock_mozilla', '-r', mock_target, '--init']
+ return super(MockMixin, self).run_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def install_mock_packages(self, mock_target, packages):
+ "Install `packages` into mock environment `mock_target`"
+ cmd = ['mock_mozilla', '-r', mock_target, '--install'] + packages
+ # TODO: parse output to see if packages actually were installed
+ return super(MockMixin, self).run_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def delete_mock_files(self, mock_target, files):
+ """Delete files from the mock environment `mock_target`. `files` should
+ be an iterable of 2-tuples: (src, dst). Only the dst component is
+ deleted."""
+ cmd_base = ['mock_mozilla', '-r', mock_target, '--shell']
+ for src, dest in files:
+ cmd = cmd_base + ['rm -rf %s' % dest]
+ super(MockMixin, self).run_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def copy_mock_files(self, mock_target, files):
+ """Copy files into the mock environment `mock_target`. `files` should
+ be an iterable of 2-tuples: (src, dst)"""
+ cmd_base = ['mock_mozilla', '-r', mock_target, '--copyin', '--unpriv']
+ for src, dest in files:
+ cmd = cmd_base + [src, dest]
+ super(MockMixin, self).run_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+ super(MockMixin, self).run_command(
+ ['mock_mozilla', '-r', mock_target, '--shell',
+ 'chown -R mock_mozilla %s' % dest],
+ halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def get_mock_target(self):
+ if self.config.get('disable_mock'):
+ return None
+ return self.default_mock_target or self.config.get('mock_target')
+
+ def enable_mock(self):
+ """Wrap self.run_command and self.get_output_from_command to run inside
+ the mock environment given by self.config['mock_target']"""
+ if not self.get_mock_target():
+ return
+ self.mock_enabled = True
+ self.run_command = self.run_command_m
+ self.get_output_from_command = self.get_output_from_command_m
+
+ def disable_mock(self):
+ """Restore self.run_command and self.get_output_from_command to their
+ original versions. This is the opposite of self.enable_mock()"""
+ if not self.get_mock_target():
+ return
+ self.mock_enabled = False
+ self.run_command = super(MockMixin, self).run_command
+ self.get_output_from_command = super(MockMixin, self).get_output_from_command
+
+ def _do_mock_command(self, func, mock_target, command, cwd=None, env=None, **kwargs):
+ """Internal helper for preparing commands to run under mock. Used by
+ run_mock_command and get_mock_output_from_command."""
+ cmd = ['mock_mozilla', '-r', mock_target, '-q']
+ if cwd:
+ cmd += ['--cwd', cwd]
+
+ if not kwargs.get('privileged'):
+ cmd += ['--unpriv']
+ cmd += ['--shell']
+
+ if not isinstance(command, basestring):
+ command = subprocess.list2cmdline(command)
+
+ # XXX - Hack - gets around AB_CD=%(locale)s type arguments
+ command = command.replace("(", "\\(")
+ command = command.replace(")", "\\)")
+
+ if env:
+ env_cmd = ['/usr/bin/env']
+ for key, value in env.items():
+ # $HOME won't work inside the mock chroot
+ if key == 'HOME':
+ continue
+ value = value.replace(";", "\\;")
+ env_cmd += ['%s=%s' % (key, value)]
+ cmd.append(subprocess.list2cmdline(env_cmd) + " " + command)
+ else:
+ cmd.append(command)
+ return func(cmd, cwd=cwd, **kwargs)
+
+ def run_mock_command(self, mock_target, command, cwd=None, env=None, **kwargs):
+ """Same as ScriptMixin.run_command, except runs command inside mock
+ environment `mock_target`."""
+ return self._do_mock_command(
+ super(MockMixin, self).run_command,
+ mock_target, command, cwd, env, **kwargs)
+
+ def get_mock_output_from_command(self, mock_target, command, cwd=None, env=None, **kwargs):
+ """Same as ScriptMixin.get_output_from_command, except runs command
+ inside mock environment `mock_target`."""
+ return self._do_mock_command(
+ super(MockMixin, self).get_output_from_command,
+ mock_target, command, cwd, env, **kwargs)
+
+ def reset_mock(self, mock_target=None):
+ """rm mock lock and reset"""
+ c = self.config
+ if mock_target is None:
+ if not c.get('mock_target'):
+ self.fatal("Cound not determine: 'mock_target'")
+ mock_target = c.get('mock_target')
+ buildroot_lock_path = os.path.join(c.get('mock_mozilla_dir', ''),
+ mock_target,
+ 'buildroot.lock')
+ self.info("Removing buildroot lock at path if exists:O")
+ self.info(buildroot_lock_path)
+ if not os.path.exists(buildroot_lock_path):
+ self.info(ERROR_MSGS['undetermined_buildroot_lock'])
+ else:
+ rm_lock_cmd = ['rm', '-f', buildroot_lock_path]
+ super(MockMixin, self).run_command(rm_lock_cmd,
+ halt_on_failure=True,
+ fatal_exit_code=3)
+ cmd = ['mock_mozilla', '-r', mock_target, '--orphanskill']
+ return super(MockMixin, self).run_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def setup_mock(self, mock_target=None, mock_packages=None, mock_files=None):
+ """Initializes and installs packages, copies files into mock
+ environment given by configuration in self.config. The mock
+ environment is given by self.config['mock_target'], the list of packges
+ to install given by self.config['mock_packages'], and the list of files
+ to copy in is self.config['mock_files']."""
+ if self.done_mock_setup or self.config.get('disable_mock'):
+ return
+
+ c = self.config
+
+ if mock_target is None:
+ assert 'mock_target' in c
+ t = c['mock_target']
+ else:
+ t = mock_target
+ self.default_mock_target = t
+
+ # Don't re-initialize mock if we're using the same packages as before
+ # Put the cache inside the mock root so that if somebody else resets
+ # the environment, it invalidates the cache
+ mock_root = super(MockMixin, self).get_output_from_command(
+ ['mock_mozilla', '-r', t, '--print-root-path']
+ )
+ package_hash_file = os.path.join(mock_root, "builds/package_list.hash")
+ if os.path.exists(package_hash_file):
+ old_packages_hash = self.read_from_file(package_hash_file)
+ self.info("old package hash: %s" % old_packages_hash)
+ else:
+ self.info("no previous package list found")
+ old_packages_hash = None
+
+ if mock_packages is None:
+ mock_packages = list(c.get('mock_packages'))
+
+ package_list_hash = hashlib.new('sha1')
+ if mock_packages:
+ for p in sorted(mock_packages):
+ package_list_hash.update(p)
+ package_list_hash = package_list_hash.hexdigest()
+
+ did_init = True
+        # This simple hash comparison doesn't take into account dependency
+ # changes. If you really care about dependencies, then they should be
+ # explicitly listed in the package list.
+ if old_packages_hash != package_list_hash:
+ self.init_mock(t)
+ else:
+ self.info("Our list of packages hasn't changed; skipping re-initialization")
+ did_init = False
+
+        # Still try to install packages here since the package version may
+ # have been updated on the server
+ if mock_packages:
+ self.install_mock_packages(t, mock_packages)
+
+ # Save our list of packages
+ self.write_to_file(package_hash_file,
+ package_list_hash)
+
+ if mock_files is None:
+ mock_files = list(c.get('mock_files'))
+ if mock_files:
+ if not did_init:
+                # mock complains if you try to copy in files that already
+ # exist, so we need to delete them here first
+ self.info("Deleting old mock files")
+ self.delete_mock_files(t, mock_files)
+ self.copy_mock_files(t, mock_files)
+
+ self.done_mock_setup = True
+
+ def run_command_m(self, *args, **kwargs):
+ """Executes self.run_mock_command if we have a mock target set,
+ otherwise executes self.run_command."""
+ mock_target = self.get_mock_target()
+ if mock_target:
+ self.setup_mock()
+ return self.run_mock_command(mock_target, *args, **kwargs)
+ else:
+ return super(MockMixin, self).run_command(*args, **kwargs)
+
+ def get_output_from_command_m(self, *args, **kwargs):
+ """Executes self.get_mock_output_from_command if we have a mock target
+ set, otherwise executes self.get_output_from_command."""
+ mock_target = self.get_mock_target()
+ if mock_target:
+ self.setup_mock()
+ return self.get_mock_output_from_command(mock_target, *args, **kwargs)
+ else:
+ return super(MockMixin, self).get_output_from_command(*args, **kwargs)
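
A sketch of how a build script typically drives this mixin; the target, package
and file names below are illustrative, not the real automation config:

    config = {
        'mock_target': 'mozilla-centos6-x86_64',
        'mock_packages': ['autoconf213', 'ccache', 'zip'],
        'mock_files': [('/home/cltbld/.hgrc', '/builds/.hgrc')],
    }

    # setup_mock() initializes the chroot, installs the packages (skipping
    # re-initialization when the package-list hash is unchanged) and copies
    # the files in; enable_mock() then routes run_command through mock.
    self.setup_mock()
    self.enable_mock()
    self.run_command(['make', '-f', 'client.mk', 'build'])
    self.disable_mock()
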
diff --git a/testing/mozharness/mozharness/mozilla/mozbase.py b/testing/mozharness/mozharness/mozilla/mozbase.py
new file mode 100644
index 000000000..0201687d1
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/mozbase.py
@@ -0,0 +1,39 @@
+import os
+from mozharness.base.script import PreScriptAction
+
+
+class MozbaseMixin(object):
+ """Automatically set virtualenv requirements to use mozbase
+ from test package.
+ """
+ def __init__(self, *args, **kwargs):
+ super(MozbaseMixin, self).__init__(*args, **kwargs)
+
+ @PreScriptAction('create-virtualenv')
+ def _install_mozbase(self, action):
+ dirs = self.query_abs_dirs()
+
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'mozbase_requirements.txt')
+ if os.path.isfile(requirements):
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+ return
+
+ # XXX Bug 879765: Dependent modules need to be listed before parent
+ # modules, otherwise they will get installed from the pypi server.
+ # XXX Bug 908356: This block can be removed as soon as the
+ # in-tree requirements files propagate to all active trees.
+ mozbase_dir = os.path.join('tests', 'mozbase')
+ self.register_virtualenv_module(
+ 'manifestparser',
+ url=os.path.join(mozbase_dir, 'manifestdestiny')
+ )
+
+ for m in ('mozfile', 'mozlog', 'mozinfo', 'moznetwork', 'mozhttpd',
+ 'mozcrash', 'mozinstall', 'mozdevice', 'mozprofile',
+ 'mozprocess', 'mozrunner'):
+ self.register_virtualenv_module(
+ m, url=os.path.join(mozbase_dir, m)
+ )
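
For context, a hypothetical script class showing how this mixin is meant to be
combined; it only hooks the create-virtualenv action, so it needs a
virtualenv-capable base:

    from mozharness.base.python import VirtualenvMixin
    from mozharness.base.script import BaseScript
    from mozharness.mozilla.mozbase import MozbaseMixin

    class MyTestScript(MozbaseMixin, VirtualenvMixin, BaseScript):
        """Gets the mozbase requirements registered automatically
        before the create-virtualenv action runs."""
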
diff --git a/testing/mozharness/mozharness/mozilla/proxxy.py b/testing/mozharness/mozharness/mozilla/proxxy.py
new file mode 100644
index 000000000..b9f14d5f2
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/proxxy.py
@@ -0,0 +1,167 @@
+"""Proxxy module. Defines a Proxxy element that fetches files using local
+ proxxy instances (if available). The goal of Proxxy is to lower the traffic
+ from the cloud to internal servers.
+"""
+import urlparse
+import socket
+from mozharness.base.log import INFO, ERROR, LogMixin
+from mozharness.base.script import ScriptMixin
+
+
+# Proxxy {{{1
+class Proxxy(ScriptMixin, LogMixin):
+ """
+ Support downloading files from HTTP caching proxies
+
+    Currently supports 'proxxy' instances, in which the caching proxy at
+ proxxy.domain.com will cache requests for ftp.mozilla.org when passed requests to
+ http://ftp.mozilla.org.proxxy.domain.com/...
+
+ self.config['proxxy']['urls'] defines the list of backend hosts we are currently caching, and
+ the hostname prefix to use for proxxy
+
+ self.config['proxxy']['instances'] lists current hostnames for proxxy instances. wildcard DNS
+ is set up so that *.proxxy.domain.com is a CNAME to the proxxy instance
+ """
+
+ # Default configuration. Can be overridden via self.config
+ PROXXY_CONFIG = {
+ "urls": [
+ ('http://ftp.mozilla.org', 'ftp.mozilla.org'),
+ ('https://ftp.mozilla.org', 'ftp.mozilla.org'),
+ ('https://ftp-ssl.mozilla.org', 'ftp.mozilla.org'),
+ # pypi
+ ('http://pypi.pvt.build.mozilla.org', 'pypi.pvt.build.mozilla.org'),
+ ('http://pypi.pub.build.mozilla.org', 'pypi.pub.build.mozilla.org'),
+ ],
+ "instances": [
+ 'proxxy1.srv.releng.use1.mozilla.com',
+ 'proxxy1.srv.releng.usw2.mozilla.com',
+ ],
+ "regions": [".use1.", ".usw2."],
+ }
+
+ def __init__(self, config, log_obj):
+        # proxxy does not need the full configuration,
+        # just the 'proxxy' element.
+        # If the configuration has no 'proxxy' section, use the default
+        # configuration instead.
+ default_config = {} if self.is_taskcluster() else self.PROXXY_CONFIG
+ self.config = config.get('proxxy', default_config)
+ self.log_obj = log_obj
+
+ def get_proxies_for_url(self, url):
+ """Maps url to its proxxy urls
+
+ Args:
+ url (str): url to be proxxied
+ Returns:
+ list: of proxy URLs to try, in sorted order.
+            Please note that the original url is NOT included in this list.
+ """
+ config = self.config
+ urls = []
+
+ self.info("proxxy config: %s" % config)
+
+ proxxy_urls = config.get('urls', [])
+ proxxy_instances = config.get('instances', [])
+
+ url_parts = urlparse.urlsplit(url)
+ url_path = url_parts.path
+ if url_parts.query:
+ url_path += "?" + url_parts.query
+ if url_parts.fragment:
+ url_path += "#" + url_parts.fragment
+
+ for prefix, target in proxxy_urls:
+ if url.startswith(prefix):
+ self.info("%s matches %s" % (url, prefix))
+ for instance in proxxy_instances:
+ if not self.query_is_proxxy_local(instance):
+ continue
+ new_url = "http://%s.%s%s" % (target, instance, url_path)
+ urls.append(new_url)
+
+ for url in urls:
+ self.info("URL Candidate: %s" % url)
+ return urls
+
+ def get_proxies_and_urls(self, urls):
+ """Gets a list of urls and returns a list of proxied urls, the list
+ of input urls is appended at the end of the return values
+
+ Args:
+ urls (list, tuple): urls to be mapped to proxxy urls
+
+ Returns:
+            list: proxied urls followed by the original urls; the original
+                urls are the last elements of the list.
+ """
+ proxxy_list = []
+ for url in urls:
+            # get_proxies_for_url always returns a list...
+ proxxy_list.extend(self.get_proxies_for_url(url))
+ proxxy_list.extend(urls)
+ return proxxy_list
+
+ def query_is_proxxy_local(self, url):
+ """Checks is url is 'proxxable' for the local instance
+
+ Args:
+ url (string): url to check
+
+ Returns:
+ bool: True if url maps to a usable proxxy,
+ False in any other case
+ """
+ fqdn = socket.getfqdn()
+ config = self.config
+ regions = config.get('regions', [])
+
+ return any(r in fqdn and r in url for r in regions)
+
+ def download_proxied_file(self, url, file_name, parent_dir=None,
+ create_parent_dir=True, error_level=ERROR,
+ exit_code=3):
+ """
+        Wrapper around BaseScript.download_file that understands proxies.
+        The retry config is set to 3 attempts with a 30 second sleep between them.
+
+ Args:
+ url (string): url to fetch
+            file_name (string, optional): output filename; defaults to None.
+                If file_name is not defined, the output name is taken from
+                the url.
+ parent_dir (string, optional): name of the parent directory
+ create_parent_dir (bool, optional): if True, creates the parent
+ directory. Defaults to True
+ error_level (mozharness log level, optional): log error level
+ defaults to ERROR
+ exit_code (int, optional): return code to log if file_name
+ is not defined and it cannot be determined from the url
+ Returns:
+            string: file_name if the download has succeeded, None in case of
+ error. In case of error, if error_level is set to FATAL,
+ this method interrupts the execution of the script
+
+ """
+ urls = self.get_proxies_and_urls([url])
+
+ for url in urls:
+ self.info("trying %s" % url)
+ retval = self.download_file(
+ url, file_name=file_name, parent_dir=parent_dir,
+ create_parent_dir=create_parent_dir, error_level=ERROR,
+ exit_code=exit_code,
+ retry_config=dict(
+ attempts=3,
+ sleeptime=30,
+ error_level=INFO,
+ ))
+ if retval:
+ return retval
+
+ self.log("Failed to download from all available URLs, aborting",
+ level=error_level, exit_code=exit_code)
+ return retval
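
A sketch of the rewriting this class performs; the hostnames follow the
defaults in PROXXY_CONFIG and the file path is made up:

    # On a host in .use1., a download of
    #   https://ftp.mozilla.org/pub/firefox/example.mar
    # is first attempted through the matching local instance:
    #   http://ftp.mozilla.org.proxxy1.srv.releng.use1.mozilla.com/pub/firefox/example.mar
    # and falls back to the original URL only if the proxies fail.
    proxxy = Proxxy(self.config, self.log_obj)
    path = proxxy.download_proxied_file(
        "https://ftp.mozilla.org/pub/firefox/example.mar",
        file_name="example.mar")
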
diff --git a/testing/mozharness/mozharness/mozilla/purge.py b/testing/mozharness/mozharness/mozilla/purge.py
new file mode 100644
index 000000000..23ffd9081
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/purge.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Purge/clobber support
+"""
+
+# Figure out where our external_tools are
+# These are in a sibling directory to the 'mozharness' module
+import os
+import mozharness
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+from mozharness.base.log import ERROR
+
+
+# PurgeMixin {{{1
+# Depends on ScriptMixin for self.run_command,
+# and BuildbotMixin for self.buildbot_config and self.query_is_nightly()
+class PurgeMixin(object):
+ clobber_tool = os.path.join(external_tools_path, 'clobberer.py')
+
+ default_skips = ['info', 'rel-*', 'tb-rel-*']
+ default_maxage = 14
+ default_periodic_clobber = 7 * 24
+
+ def clobberer(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ if not self.buildbot_config:
+ self.fatal("clobberer requires self.buildbot_config (usually from $PROPERTIES_FILE)")
+
+ periodic_clobber = c.get('periodic_clobber') or self.default_periodic_clobber
+ clobberer_url = c['clobberer_url']
+
+ builddir = os.path.basename(dirs['base_work_dir'])
+ branch = self.buildbot_config['properties']['branch']
+ buildername = self.buildbot_config['properties']['buildername']
+ slave = self.buildbot_config['properties']['slavename']
+ master = self.buildbot_config['properties']['master']
+
+ cmd = []
+ if self._is_windows():
+ # The virtualenv isn't setup yet, so just use python directly.
+ cmd.append(self.query_exe('python'))
+        # Add --dry-run if you don't want to do this for real
+ cmd.extend([self.clobber_tool])
+ # TODO configurable list
+ cmd.extend(['-s', 'scripts'])
+ cmd.extend(['-s', 'logs'])
+ cmd.extend(['-s', 'buildprops.json'])
+ cmd.extend(['-s', 'token'])
+ cmd.extend(['-s', 'oauth.txt'])
+
+ if periodic_clobber:
+ cmd.extend(['-t', str(periodic_clobber)])
+
+ cmd.extend([clobberer_url, branch, buildername, builddir, slave, master])
+ error_list = [{
+ 'substr': 'Error contacting server', 'level': ERROR,
+ 'explanation': 'Error contacting server for clobberer information.'
+ }]
+
+ retval = self.retry(self.run_command, attempts=3, good_statuses=(0,), args=[cmd],
+ kwargs={'cwd':os.path.dirname(dirs['base_work_dir']),
+ 'error_list':error_list})
+ if retval != 0:
+ self.fatal("failed to clobber build", exit_code=2)
+
+ def clobber(self, always_clobber_dirs=None):
+ """ Mozilla clobberer-type clobber.
+ """
+ c = self.config
+ if c.get('developer_mode'):
+ self.info("Suppressing clobber in developer mode for safety.")
+ return
+ if c.get('is_automation'):
+ # Nightly builds always clobber
+ do_clobber = False
+ if self.query_is_nightly():
+ self.info("Clobbering because we're a nightly build")
+ do_clobber = True
+ if c.get('force_clobber'):
+ self.info("Clobbering because our config forced us to")
+ do_clobber = True
+ if do_clobber:
+ super(PurgeMixin, self).clobber()
+ else:
+ # Delete the upload dir so we don't upload previous stuff by
+ # accident
+ if always_clobber_dirs is None:
+ always_clobber_dirs = []
+ for path in always_clobber_dirs:
+ self.rmtree(path)
+ if 'clobberer_url' in c and c.get('use_clobberer', True):
+ self.clobberer()
+ else:
+ super(PurgeMixin, self).clobber()
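
Hypothetical config wiring for clobber(); the URL and values are illustrative:

    config = {
        'clobberer_url': 'https://clobberer.example.com/index.php',
        'use_clobberer': True,
        'periodic_clobber': 168,   # hours between forced clobbers
        'is_automation': True,
    }

    # With is_automation set, nightly and force_clobber runs clobber
    # everything; without it, only always_clobber_dirs is removed before
    # consulting the clobberer server.
    self.clobber(always_clobber_dirs=[dirs['abs_upload_dir']])
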
diff --git a/testing/mozharness/mozharness/mozilla/release.py b/testing/mozharness/mozharness/mozilla/release.py
new file mode 100755
index 000000000..52a84cdba
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/release.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""release.py
+
+"""
+
+import os
+from distutils.version import LooseVersion, StrictVersion
+
+from mozharness.base.config import parse_config_file
+
+
+# ReleaseMixin {{{1
+class ReleaseMixin(object):
+ release_config = {}
+
+ def query_release_config(self):
+ if self.release_config:
+ return self.release_config
+ c = self.config
+ dirs = self.query_abs_dirs()
+ if c.get("release_config_file"):
+ self.info("Getting release config from %s..." % c["release_config_file"])
+ rc = None
+ try:
+ rc = parse_config_file(
+ os.path.join(dirs['abs_work_dir'],
+ c["release_config_file"]),
+ config_dict_name="releaseConfig"
+ )
+ except IOError:
+ self.fatal("Release config file %s not found!" % c["release_config_file"])
+ except RuntimeError:
+ self.fatal("Invalid release config file %s!" % c["release_config_file"])
+ self.release_config['version'] = rc['version']
+ self.release_config['buildnum'] = rc['buildNumber']
+ self.release_config['ftp_server'] = rc['stagingServer']
+ self.release_config['ftp_user'] = c.get('ftp_user', rc['hgUsername'])
+ self.release_config['ftp_ssh_key'] = c.get('ftp_ssh_key', rc['hgSshKey'])
+ self.release_config['release_channel'] = rc['releaseChannel']
+ else:
+ self.info("No release config file; using default config.")
+ for key in ('version', 'buildnum',
+ 'ftp_server', 'ftp_user', 'ftp_ssh_key'):
+ self.release_config[key] = c[key]
+ self.info("Release config:\n%s" % self.release_config)
+ return self.release_config
+
+
+def get_previous_version(version, partial_versions):
+ """ The patcher config bumper needs to know the exact previous version
+ We use LooseVersion for ESR because StrictVersion can't parse the trailing
+ 'esr', but StrictVersion otherwise because it can sort X.0bN lower than X.0.
+ The current version is excluded to avoid an error if build1 is aborted
+ before running the updates builder and now we're doing build2
+ """
+ if version.endswith('esr'):
+ return str(max(LooseVersion(v) for v in partial_versions if
+ v != version))
+ else:
+        # StrictVersion truncates the trailing zero in versions with more than
+        # one dot. Compose a structure that will be sorted by StrictVersion
+        # and return the untouched version string.
+ composed = sorted([(v, StrictVersion(v)) for v in partial_versions if
+ v != version], key=lambda x: x[1], reverse=True)
+ return composed[0][0]
+
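
Worked examples of get_previous_version; the version strings are illustrative:

    # ESR: LooseVersion handles the 'esr' suffix; the newest partial wins.
    get_previous_version('45.2.0esr', ['45.1.1esr', '45.1.0esr'])
    # -> '45.1.1esr'

    # Non-ESR: StrictVersion sorts the beta below the current release but
    # above the previous one, and the untouched string is returned.
    get_previous_version('32.0', ['32.0b9', '31.0'])
    # -> '32.0b9'
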
diff --git a/testing/mozharness/mozharness/mozilla/repo_manifest.py b/testing/mozharness/mozharness/mozilla/repo_manifest.py
new file mode 100644
index 000000000..2ffb34fe9
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/repo_manifest.py
@@ -0,0 +1,226 @@
+"""
+Module for handling repo style XML manifests
+"""
+import xml.dom.minidom
+import os
+import re
+
+
+def load_manifest(filename):
+ """
+    Loads manifest from `filename` and returns a single flattened manifest.
+    Processes any <include name="..." /> nodes recursively.
+    Removes projects referenced by <remove-project name="..." /> nodes.
+    Aborts on unsupported manifest tags.
+    Returns the root node of the resulting DOM.
+ """
+ doc = xml.dom.minidom.parse(filename)
+
+ # Check that we don't have any unsupported tags
+ to_visit = list(doc.childNodes)
+ while to_visit:
+ node = to_visit.pop()
+        # Skip text and comment nodes
+ if node.nodeType in (node.TEXT_NODE, node.COMMENT_NODE):
+ continue
+
+ if node.tagName not in ('include', 'project', 'remote', 'default', 'manifest', 'copyfile', 'remove-project'):
+ raise ValueError("Unsupported tag: %s" % node.tagName)
+ to_visit.extend(node.childNodes)
+
+ # Find all <include> nodes
+ for i in doc.getElementsByTagName('include'):
+ p = i.parentNode
+
+ # The name attribute is relative to where the original manifest lives
+ inc_filename = i.getAttribute('name')
+ inc_filename = os.path.join(os.path.dirname(filename), inc_filename)
+
+ # Parse the included file
+ inc_doc = load_manifest(inc_filename).documentElement
+ # For all the child nodes in the included manifest, insert into our
+ # manifest just before the include node
+ # We operate on a copy of childNodes because when we reparent `c`, the
+ # list of childNodes is modified.
+ for c in inc_doc.childNodes[:]:
+ p.insertBefore(c, i)
+ # Now we can remove the include node
+ p.removeChild(i)
+
+ # Remove all projects referenced by <remove-project>
+ projects = {}
+ manifest = doc.documentElement
+ to_remove = []
+ for node in manifest.childNodes:
+        # Skip text and comment nodes
+ if node.nodeType in (node.TEXT_NODE, node.COMMENT_NODE):
+ continue
+
+ if node.tagName == 'project':
+ projects[node.getAttribute('name')] = node
+
+ elif node.tagName == 'remove-project':
+ project_node = projects[node.getAttribute('name')]
+ to_remove.append(project_node)
+ to_remove.append(node)
+
+ for r in to_remove:
+ r.parentNode.removeChild(r)
+
+ return doc
+
+
+def rewrite_remotes(manifest, mapping_func, force_all=True):
+ """
+    Rewrites manifest remotes in place.
+    Returns the same manifest, with the remotes transformed by mapping_func.
+    mapping_func should return a modified remote node, or None if no changes
+    are required.
+    If force_all is True, then it is an error for mapping_func to return None;
+    a ValueError is raised in this case.
+ """
+ for r in manifest.getElementsByTagName('remote'):
+ m = mapping_func(r)
+ if not m:
+ if force_all:
+ raise ValueError("Wasn't able to map %s" % r.toxml())
+ continue
+
+ r.parentNode.replaceChild(m, r)
+
+
+def add_project(manifest, name, path, remote=None, revision=None):
+ """
+ Adds a project to the manifest in place
+ """
+
+ project = manifest.createElement("project")
+ project.setAttribute('name', name)
+ project.setAttribute('path', path)
+ if remote:
+ project.setAttribute('remote', remote)
+ if revision:
+ project.setAttribute('revision', revision)
+
+ manifest.documentElement.appendChild(project)
+
+
+def remove_project(manifest, name=None, path=None):
+ """
+ Removes a project from manifest.
+ One of name or path must be set. If path is specified, then the project
+ with the given path is removed, otherwise the project with the given name
+ is removed.
+ """
+ assert name or path
+ node = get_project(manifest, name, path)
+ if node:
+ node.parentNode.removeChild(node)
+ return node
+
+
+def get_project(manifest, name=None, path=None):
+ """
+ Gets a project node from the manifest.
+ One of name or path must be set. If path is specified, then the project
+ with the given path is returned, otherwise the project with the given name
+ is returned.
+ """
+ assert name or path
+ for node in manifest.getElementsByTagName('project'):
+ if path is not None and node.getAttribute('path') == path:
+ return node
+ if node.getAttribute('name') == name:
+ return node
+
+
+def get_remote(manifest, name):
+ for node in manifest.getElementsByTagName('remote'):
+ if node.getAttribute('name') == name:
+ return node
+
+
+def get_default(manifest):
+ default = manifest.getElementsByTagName('default')[0]
+ return default
+
+
+def get_project_remote_url(manifest, project):
+ """
+ Gets the remote URL for the given project node. Will return the default
+ remote if the project doesn't explicitly specify one.
+ """
+ if project.hasAttribute('remote'):
+ remote = get_remote(manifest, project.getAttribute('remote'))
+ else:
+ default = get_default(manifest)
+ remote = get_remote(manifest, default.getAttribute('remote'))
+ fetch = remote.getAttribute('fetch')
+ if not fetch.endswith('/'):
+ fetch += '/'
+ return "%s%s" % (fetch, project.getAttribute('name'))
+
+
+def get_project_revision(manifest, project):
+ """
+ Gets the revision for the given project node. Will return the default
+ revision if the project doesn't explicitly specify one.
+ """
+ if project.hasAttribute('revision'):
+ return project.getAttribute('revision')
+ else:
+ default = get_default(manifest)
+ return default.getAttribute('revision')
+
+
+def remove_group(manifest, group):
+ """
+ Removes all projects with groups=`group`
+ """
+ retval = []
+ for node in manifest.getElementsByTagName('project'):
+ if group in node.getAttribute('groups').split(","):
+ node.parentNode.removeChild(node)
+ retval.append(node)
+ return retval
+
+
+def map_remote(r, mappings):
+ """
+ Helper function for mapping git remotes
+ """
+ remote = r.getAttribute('fetch')
+ if remote in mappings:
+ r.setAttribute('fetch', mappings[remote])
+ # Add a comment about where our original remote was
+ comment = r.ownerDocument.createComment("original fetch url was %s" % remote)
+ line = r.ownerDocument.createTextNode("\n")
+ r.parentNode.insertBefore(comment, r)
+ r.parentNode.insertBefore(line, r)
+ return r
+ return None
+
+
+COMMIT_PATTERN = re.compile("[0-9a-f]{40}")
+
+
+def is_commitid(revision):
+ """
+ Returns True if revision looks like a commit id
+    i.e. a 40-character string made up of 0-9a-f
+ """
+ return bool(re.match(COMMIT_PATTERN, revision))
+
+
+def cleanup(manifest, depth=0):
+ """
+ Remove any empty text nodes
+ """
+ for n in manifest.childNodes[:]:
+ if n.childNodes:
+ n.normalize()
+ if n.nodeType == n.TEXT_NODE and not n.data.strip():
+ if not n.nextSibling:
+ depth -= 2
+ n.data = "\n" + (" " * depth)
+ cleanup(n, depth + 2)
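
A minimal sketch of these helpers against a toy manifest; the filename and
contents are made up:

    # default.xml:
    #   <manifest>
    #     <remote name="mozilla" fetch="https://git.mozilla.org/"/>
    #     <default remote="mozilla" revision="master"/>
    #     <project name="build/tools" path="tools"/>
    #   </manifest>
    doc = load_manifest('default.xml')
    project = get_project(doc, name='build/tools')
    get_project_remote_url(doc, project)  # -> 'https://git.mozilla.org/build/tools'
    get_project_revision(doc, project)    # -> 'master'
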
diff --git a/testing/mozharness/mozharness/mozilla/repo_manupulation.py b/testing/mozharness/mozharness/mozilla/repo_manupulation.py
new file mode 100644
index 000000000..a2dfc46a2
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/repo_manupulation.py
@@ -0,0 +1,164 @@
+import os
+
+from mozharness.base.errors import HgErrorList
+from mozharness.base.log import FATAL, INFO
+from mozharness.base.vcs.mercurial import MercurialVCS
+
+
+class MercurialRepoManipulationMixin(object):
+
+ def get_version(self, repo_root,
+ version_file="browser/config/version.txt"):
+ version_path = os.path.join(repo_root, version_file)
+ contents = self.read_from_file(version_path, error_level=FATAL)
+ lines = [l for l in contents.splitlines() if l and
+ not l.startswith("#")]
+ return lines[-1].split(".")
+
+ def replace(self, file_name, from_, to_):
+ """ Replace text in a file.
+ """
+ text = self.read_from_file(file_name, error_level=FATAL)
+ new_text = text.replace(from_, to_)
+ if text == new_text:
+ self.fatal("Cannot replace '%s' to '%s' in '%s'" %
+ (from_, to_, file_name))
+ self.write_to_file(file_name, new_text, error_level=FATAL)
+
+ def query_hg_revision(self, path):
+ """ Avoid making 'pull' a required action every run, by being able
+ to fall back to figuring out the revision from the cloned repo
+ """
+ m = MercurialVCS(log_obj=self.log_obj, config=self.config)
+ revision = m.get_revision_from_path(path)
+ return revision
+
+ def hg_commit(self, cwd, message, user=None, ignore_no_changes=False):
+ """ Commit changes to hg.
+ """
+ cmd = self.query_exe('hg', return_type='list') + [
+ 'commit', '-m', message]
+ if user:
+ cmd.extend(['-u', user])
+ success_codes = [0]
+ if ignore_no_changes:
+ success_codes.append(1)
+ self.run_command(
+ cmd, cwd=cwd, error_list=HgErrorList,
+ halt_on_failure=True,
+ success_codes=success_codes
+ )
+ return self.query_hg_revision(cwd)
+
+ def clean_repos(self):
+ """ We may end up with contaminated local repos at some point, but
+ we don't want to have to clobber and reclone from scratch every
+ time.
+
+ This is an attempt to clean up the local repos without needing a
+ clobber.
+ """
+ dirs = self.query_abs_dirs()
+ hg = self.query_exe("hg", return_type="list")
+ hg_repos = self.query_repos()
+ hg_strip_error_list = [{
+ 'substr': r'''abort: empty revision set''', 'level': INFO,
+ 'explanation': "Nothing to clean up; we're good!",
+ }] + HgErrorList
+ for repo_config in hg_repos:
+ repo_name = repo_config["dest"]
+ repo_path = os.path.join(dirs['abs_work_dir'], repo_name)
+ if os.path.exists(repo_path):
+ # hg up -C to discard uncommitted changes
+ self.run_command(
+ hg + ["up", "-C", "-r", repo_config['branch']],
+ cwd=repo_path,
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+ # discard unpushed commits
+ status = self.retry(
+ self.run_command,
+ args=(hg + ["--config", "extensions.mq=", "strip",
+ "--no-backup", "outgoing()"], ),
+ kwargs={
+ 'cwd': repo_path,
+ 'error_list': hg_strip_error_list,
+ 'return_type': 'num_errors',
+ 'success_codes': (0, 255),
+ },
+ )
+ if status not in [0, 255]:
+ self.fatal("Issues stripping outgoing revisions!")
+ # 2nd hg up -C to make sure we're not on a stranded head
+ # which can happen when reverting debugsetparents
+ self.run_command(
+ hg + ["up", "-C", "-r", repo_config['branch']],
+ cwd=repo_path,
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+
+ def commit_changes(self):
+ """ Do the commit.
+ """
+ hg = self.query_exe("hg", return_type="list")
+ for cwd in self.query_commit_dirs():
+ self.run_command(hg + ["diff"], cwd=cwd)
+ self.hg_commit(
+ cwd, user=self.config['hg_user'],
+ message=self.query_commit_message(),
+ ignore_no_changes=self.config.get("ignore_no_changes", False)
+ )
+ self.info("Now verify |hg out| and |hg out --patch| if you're paranoid, and --push")
+
+ def hg_tag(self, cwd, tags, user=None, message=None, revision=None,
+ force=None, halt_on_failure=True):
+ if isinstance(tags, basestring):
+ tags = [tags]
+ cmd = self.query_exe('hg', return_type='list') + ['tag']
+ if not message:
+ message = "No bug - Tagging %s" % os.path.basename(cwd)
+ if revision:
+ message = "%s %s" % (message, revision)
+ message = "%s with %s" % (message, ', '.join(tags))
+ message += " a=release DONTBUILD CLOSED TREE"
+ self.info(message)
+ cmd.extend(['-m', message])
+ if user:
+ cmd.extend(['-u', user])
+ if revision:
+ cmd.extend(['-r', revision])
+ if force:
+ cmd.append('-f')
+ cmd.extend(tags)
+ return self.run_command(
+ cmd, cwd=cwd, halt_on_failure=halt_on_failure,
+ error_list=HgErrorList
+ )
+
+ def push(self):
+ """
+ """
+ error_message = """Push failed! If there was a push race, try rerunning
+the script (--clean-repos --pull --migrate). The second run will be faster."""
+ hg = self.query_exe("hg", return_type="list")
+ for cwd in self.query_push_dirs():
+ if not cwd:
+ self.warning("Skipping %s" % cwd)
+ continue
+ push_cmd = hg + ['push'] + self.query_push_args(cwd)
+ if self.config.get("push_dest"):
+ push_cmd.append(self.config["push_dest"])
+ status = self.run_command(
+ push_cmd,
+ cwd=cwd,
+ error_list=HgErrorList,
+ success_codes=[0, 1],
+ )
+ if status == 1:
+ self.warning("No changes for %s!" % cwd)
+ elif status:
+ self.fatal(error_message)
+
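
A sketch of the usual commit-and-tag sequence a release script performs with
this mixin; the path, user and tag names are illustrative:

    repo = '/builds/mozilla-release'
    rev = self.hg_commit(repo, message='Version bump', user='ffxbld',
                         ignore_no_changes=True)
    self.hg_tag(repo, ['FIREFOX_52_0_RELEASE', 'FIREFOX_52_0_BUILD1'],
                user='ffxbld', revision=rev, force=True)
    self.push()
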
diff --git a/testing/mozharness/mozharness/mozilla/secrets.py b/testing/mozharness/mozharness/mozilla/secrets.py
new file mode 100644
index 000000000..d40964bd6
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/secrets.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Support for fetching secrets from the secrets API
+"""
+
+import os
+import mozharness
+import urllib2
+import json
+from mozharness.base.log import ERROR
+
+
+class SecretsMixin(object):
+
+ def _fetch_secret(self, secret_name):
+ self.info("fetching secret {} from API".format(secret_name))
+ # fetch from http://taskcluster, which points to the taskcluster proxy
+ # within a taskcluster task. Outside of that environment, do not
+ # use this action.
+ url = "http://taskcluster/secrets/v1/secret/" + secret_name
+ res = urllib2.urlopen(url)
+ if res.getcode() != 200:
+ self.fatal("Error fetching from secrets API:" + res.read())
+
+ return json.load(res)['secret']['content']
+
+ def get_secrets(self):
+ """
+ Get the secrets specified by the `secret_files` configuration. This is
+ a list of dictionaries, one for each secret. The `secret_name` key
+ names the key in the TaskCluster secrets API to fetch (see
+ http://docs.taskcluster.net/services/secrets/). It can contain
+ %-substitutions based on the `subst` dictionary below.
+
+ Since secrets must be JSON objects, the `content` property of the
+ secret is used as the value to be written to disk.
+
+ The `filename` key in the dictionary gives the filename to which the
+ secret should be written.
+
+ The optional `min_scm_level` key gives a minimum SCM level at which this
+        secret is required. For lower levels, the value of the `default` key
+ is used, or no secret is written.
+ """
+ if self.config.get('forced_artifact_build'):
+ self.info('Skipping due to forced artifact build.')
+ return
+
+ secret_files = self.config.get('secret_files', [])
+
+ scm_level = self.config.get('scm-level', 1)
+ subst = {
+ 'scm-level': scm_level,
+ }
+
+ for sf in secret_files:
+ filename = sf['filename']
+ secret_name = sf['secret_name'] % subst
+ min_scm_level = sf.get('min_scm_level', 0)
+ if scm_level <= min_scm_level:
+ if 'default' in sf:
+ self.info("Using default value for " + filename)
+ secret = sf['default']
+ else:
+ self.info("No default for secret; not writing " + filename)
+ continue
+ else:
+ secret = self._fetch_secret(secret_name)
+
+            with open(filename, "w") as f:
+                f.write(secret)
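
An illustrative `secret_files` entry; the secret name and filename are
invented. With scm-level 3 the secret is fetched from the proxy, while at
level 1 the empty default is written instead:

    config = {
        'scm-level': 3,
        'secret_files': [{
            'filename': 'gapi.data',
            'secret_name': 'project/releng/gecko/build/level-%(scm-level)s/gapi.data',
            'min_scm_level': 2,
            'default': '',
        }],
    }
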
diff --git a/testing/mozharness/mozharness/mozilla/selfserve.py b/testing/mozharness/mozharness/mozilla/selfserve.py
new file mode 100644
index 000000000..69e243059
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/selfserve.py
@@ -0,0 +1,47 @@
+import json
+import site
+
+# SelfServeMixin {{{1
+class SelfServeMixin(object):
+ def _get_session(self):
+ site_packages_path = self.query_python_site_packages_path()
+ site.addsitedir(site_packages_path)
+ import requests
+ session = requests.Session()
+ adapter = requests.adapters.HTTPAdapter(max_retries=5)
+ session.mount("http://", adapter)
+ session.mount("https://", adapter)
+ return session
+
+ def _get_base_url(self):
+ return self.config["selfserve_url"].rstrip("/")
+
+ def trigger_nightly_builds(self, branch, revision, auth):
+ session = self._get_session()
+
+ selfserve_base = self._get_base_url()
+ url = "%s/%s/rev/%s/nightly" % (selfserve_base, branch, revision)
+
+ data = {
+ "revision": revision,
+ }
+ self.info("Triggering nightly builds via %s" % url)
+ return session.post(url, data=data, auth=auth).raise_for_status()
+
+ def trigger_arbitrary_job(self, builder, branch, revision, auth, files=None):
+ session = self._get_session()
+
+ selfserve_base = self._get_base_url()
+ url = "%s/%s/builders/%s/%s" % (selfserve_base, branch, builder, revision)
+
+ data = {
+ "properties": json.dumps({
+ "branch": branch,
+ "revision": revision
+ }),
+ }
+ if files:
+ data["files"] = json.dumps(files)
+
+ self.info("Triggering arbritrary job at %s" % url)
+ return session.post(url, data=data, auth=auth).raise_for_status()
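
A hypothetical invocation; the self-serve base URL, builder name and
credentials are placeholders:

    self.config['selfserve_url'] = 'https://build-api.example.com/self-serve'
    self.trigger_arbitrary_job('Linux mozilla-central build',
                               'mozilla-central', 'abcdef123456',
                               auth=('user', 'password'))
    # POSTs to
    # .../mozilla-central/builders/Linux mozilla-central build/abcdef123456
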
diff --git a/testing/mozharness/mozharness/mozilla/signing.py b/testing/mozharness/mozharness/mozilla/signing.py
new file mode 100755
index 000000000..3b16ce595
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/signing.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Mozilla-specific signing methods.
+"""
+
+import os
+import re
+import json
+
+from mozharness.base.errors import BaseErrorList
+from mozharness.base.log import ERROR, FATAL
+from mozharness.base.signing import AndroidSigningMixin, BaseSigningMixin
+
+AndroidSignatureVerificationErrorList = BaseErrorList + [{
+ "regex": re.compile(r'''^Invalid$'''),
+ "level": FATAL,
+ "explanation": "Signature is invalid!"
+}, {
+ "substr": "filename not matched",
+ "level": ERROR,
+}, {
+ "substr": "ERROR: Could not unzip",
+ "level": ERROR,
+}, {
+ "regex": re.compile(r'''Are you sure this is a (nightly|release) package'''),
+ "level": FATAL,
+ "explanation": "Not signed!"
+}]
+
+
+# SigningMixin {{{1
+
+class SigningMixin(BaseSigningMixin):
+ """Generic signing helper methods."""
+ def query_moz_sign_cmd(self, formats=['gpg']):
+ if 'MOZ_SIGNING_SERVERS' not in os.environ:
+ self.fatal("MOZ_SIGNING_SERVERS not in env; no MOZ_SIGN_CMD for you!")
+ dirs = self.query_abs_dirs()
+ signing_dir = os.path.join(dirs['abs_work_dir'], 'tools', 'release', 'signing')
+ cache_dir = os.path.join(dirs['abs_work_dir'], 'signing_cache')
+ token = os.path.join(dirs['base_work_dir'], 'token')
+ nonce = os.path.join(dirs['base_work_dir'], 'nonce')
+ host_cert = os.path.join(signing_dir, 'host.cert')
+ python = self.query_exe('python')
+ cmd = [
+ python,
+ os.path.join(signing_dir, 'signtool.py'),
+ '--cachedir', cache_dir,
+ '-t', token,
+ '-n', nonce,
+ '-c', host_cert,
+ ]
+ if formats:
+ for f in formats:
+ cmd += ['-f', f]
+ for h in os.environ['MOZ_SIGNING_SERVERS'].split(","):
+ cmd += ['-H', h]
+ return cmd
+
+ def generate_signing_manifest(self, files):
+ """Generate signing manifest for signingworkers
+
+ Every entry in the manifest requires a dictionary of
+ "file_to_sign" (basename) and "hash" (SHA512) of every file to be
+ signed. Signing format is defined in the signing task.
+ """
+ manifest_content = [
+ {
+ "file_to_sign": os.path.basename(f),
+ "hash": self.query_sha512sum(f)
+ }
+ for f in files
+ ]
+ return json.dumps(manifest_content)
+
+
+# MobileSigningMixin {{{1
+class MobileSigningMixin(AndroidSigningMixin, SigningMixin):
+ def verify_android_signature(self, apk, script=None, key_alias="nightly",
+ tools_dir="tools/", env=None):
+ """Runs mjessome's android signature verification script.
+ This currently doesn't check to see if the apk exists; you may want
+ to do that before calling the method.
+ """
+ c = self.config
+ dirs = self.query_abs_dirs()
+ if script is None:
+ script = c.get('signature_verification_script')
+ if env is None:
+ env = self.query_env()
+ return self.run_command(
+ [script, "--tools-dir=%s" % tools_dir, "--%s" % key_alias,
+ "--apk=%s" % apk],
+ cwd=dirs['abs_work_dir'],
+ env=env,
+ error_list=AndroidSignatureVerificationErrorList
+ )
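
A sketch of how MOZ_SIGN_CMD is typically assembled from this helper; the
signing servers and format are examples:

    import subprocess

    # Requires MOZ_SIGNING_SERVERS in the environment, e.g.
    # "signing1.example.com:9110,signing2.example.com:9110".
    cmd = self.query_moz_sign_cmd(formats=['gpg'])
    env = self.query_env()
    env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(cmd)
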
diff --git a/testing/mozharness/mozharness/mozilla/structuredlog.py b/testing/mozharness/mozharness/mozilla/structuredlog.py
new file mode 100644
index 000000000..d87c5ebdc
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/structuredlog.py
@@ -0,0 +1,173 @@
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import json
+
+from mozharness.base import log
+from mozharness.base.log import OutputParser, WARNING, INFO, ERROR
+from mozharness.mozilla.buildbot import TBPL_WARNING, TBPL_FAILURE
+from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WORST_LEVEL_TUPLE
+from mozharness.mozilla.testing.unittest import tbox_print_summary
+
+
+class StructuredOutputParser(OutputParser):
+ # The script class using this must inherit the MozbaseMixin to ensure
+ # that mozlog is available.
+ def __init__(self, **kwargs):
+ """Object that tracks the overall status of the test run"""
+ # The 'strict' argument dictates whether the presence of output
+ # from the harness process other than line-delimited json indicates
+ # failure. If it does not, the errors_list parameter may be used
+ # to detect additional failure output from the harness process.
+ if 'strict' in kwargs:
+ self.strict = kwargs.pop('strict')
+ else:
+ self.strict = True
+
+ self.suite_category = kwargs.pop('suite_category', None)
+
+ tbpl_compact = kwargs.pop("log_compact", False)
+ super(StructuredOutputParser, self).__init__(**kwargs)
+
+ mozlog = self._get_mozlog_module()
+ self.formatter = mozlog.formatters.TbplFormatter(compact=tbpl_compact)
+ self.handler = mozlog.handlers.StatusHandler()
+ self.log_actions = mozlog.structuredlog.log_actions()
+
+ self.worst_log_level = INFO
+ self.tbpl_status = TBPL_SUCCESS
+
+ def _get_mozlog_module(self):
+ try:
+ import mozlog
+ except ImportError:
+ self.fatal("A script class using structured logging must inherit "
+ "from the MozbaseMixin to ensure that mozlog is available.")
+ return mozlog
+
+ def _handle_unstructured_output(self, line):
+ if self.strict:
+ self.critical(("Test harness output was not a valid structured log message: "
+ "\n%s") % line)
+ self.update_levels(TBPL_FAILURE, log.CRITICAL)
+ return
+ super(StructuredOutputParser, self).parse_single_line(line)
+
+ def parse_single_line(self, line):
+ """Parses a line of log output from the child process and passes
+ it to mozlog to update the overall status of the run.
+ Re-emits the logged line in human-readable format.
+ """
+ level = INFO
+ tbpl_level = TBPL_SUCCESS
+
+ data = None
+ try:
+ candidate_data = json.loads(line)
+ if (isinstance(candidate_data, dict) and
+ 'action' in candidate_data and candidate_data['action'] in self.log_actions):
+ data = candidate_data
+ except ValueError:
+ pass
+
+ if data is None:
+ self._handle_unstructured_output(line)
+ return
+
+ self.handler(data)
+
+ action = data["action"]
+ if action == "log":
+ level = getattr(log, data["level"].upper())
+
+ log_data = self.formatter(data)
+ if log_data is not None:
+ self.log(log_data, level=level)
+ self.update_levels(tbpl_level, level)
+
+ def evaluate_parser(self, return_code, success_codes=None):
+ success_codes = success_codes or [0]
+ summary = self.handler.summarize()
+
+ fail_pair = TBPL_WARNING, WARNING
+ error_pair = TBPL_FAILURE, ERROR
+
+ # These are warning/orange statuses.
+ failure_conditions = [
+ sum(summary.unexpected_statuses.values()) > 0,
+ summary.action_counts.get('crash', 0) > summary.expected_statuses.get('CRASH', 0),
+ summary.action_counts.get('valgrind_error', 0) > 0
+ ]
+ for condition in failure_conditions:
+ if condition:
+ self.update_levels(*fail_pair)
+
+ # These are error/red statuses. A message is output here every time something
+ # wouldn't otherwise be highlighted in the UI.
+ required_actions = {
+ 'suite_end': 'No suite end message was emitted by this harness.',
+ 'test_end': 'No checks run.',
+ }
+ for action, diagnostic_message in required_actions.iteritems():
+ if action not in summary.action_counts:
+ self.log(diagnostic_message, ERROR)
+ self.update_levels(*error_pair)
+
+ failure_log_levels = ['ERROR', 'CRITICAL']
+ for level in failure_log_levels:
+ if level in summary.log_level_counts:
+ self.update_levels(*error_pair)
+
+ # If a superclass was used to detect errors with a regex based output parser,
+ # this will be reflected in the status here.
+ if self.num_errors:
+ self.update_levels(*error_pair)
+
+ # Harnesses typically return non-zero on test failure, so don't promote
+ # to error if we already have a failing status.
+ if return_code not in success_codes and self.tbpl_status == TBPL_SUCCESS:
+ self.update_levels(*error_pair)
+
+ return self.tbpl_status, self.worst_log_level
+
+ def update_levels(self, tbpl_level, log_level):
+ self.worst_log_level = self.worst_level(log_level, self.worst_log_level)
+ self.tbpl_status = self.worst_level(tbpl_level, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ def print_summary(self, suite_name):
+        # Summary text is provided for compatibility. Counts were previously
+        # in the format <pass count>/<fail count>/<todo count>;
+        # <expected count>/<unexpected count>/<expected fail count> yields the
+        # equivalent info from a structured log (the fail count from the prior
+        # implementation includes unexpected passes from "todo" assertions).
+ summary = self.handler.summarize()
+ unexpected_count = sum(summary.unexpected_statuses.values())
+ expected_count = sum(summary.expected_statuses.values())
+ expected_failures = summary.expected_statuses.get('FAIL', 0)
+
+ if unexpected_count:
+ fail_text = '<em class="testfail">%s</em>' % unexpected_count
+ else:
+ fail_text = '0'
+
+ text_summary = "%s/%s/%s" % (expected_count, fail_text, expected_failures)
+ self.info("TinderboxPrint: %s<br/>%s\n" % (suite_name, text_summary))
+
+ def append_tinderboxprint_line(self, suite_name):
+ summary = self.handler.summarize()
+ unexpected_count = sum(summary.unexpected_statuses.values())
+ expected_count = sum(summary.expected_statuses.values())
+ expected_failures = summary.expected_statuses.get('FAIL', 0)
+ crashed = 0
+ if 'crash' in summary.action_counts:
+ crashed = summary.action_counts['crash']
+ text_summary = tbox_print_summary(expected_count,
+ unexpected_count,
+ expected_failures,
+ crashed > 0,
+ False)
+ self.info("TinderboxPrint: %s<br/>%s\n" % (suite_name, text_summary))
diff --git a/testing/mozharness/mozharness/mozilla/taskcluster_helper.py b/testing/mozharness/mozharness/mozilla/taskcluster_helper.py
new file mode 100644
index 000000000..6921b8938
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/taskcluster_helper.py
@@ -0,0 +1,274 @@
+"""Taskcluster module. Defines a few helper functions to call into the taskcluster
+ client.
+"""
+import os
+from datetime import datetime, timedelta
+from urlparse import urljoin
+
+from mozharness.base.log import LogMixin
+
+
+# Taskcluster {{{1
+class Taskcluster(LogMixin):
+ """
+ Helper functions to report data to Taskcluster
+ """
+ def __init__(self, branch, rank, client_id, access_token, log_obj,
+ task_id=None):
+ self.rank = rank
+ self.log_obj = log_obj
+
+ # Try builds use a different set of credentials which have access to the
+ # buildbot-try scope.
+ if branch == 'try':
+ self.buildbot = 'buildbot-try'
+ else:
+ self.buildbot = 'buildbot'
+
+ # We can't import taskcluster at the top of the script because it is
+ # part of the virtualenv, so import it now. The virtualenv needs to be
+ # activated before this point by the mozharness script, or else we won't
+ # be able to find this module.
+ import taskcluster
+ taskcluster.config['credentials']['clientId'] = client_id
+ taskcluster.config['credentials']['accessToken'] = access_token
+ self.taskcluster_queue = taskcluster.Queue()
+ self.task_id = task_id or taskcluster.slugId()
+ self.put_file = taskcluster.utils.putFile
+
+ def create_task(self, routes):
+ curdate = datetime.utcnow()
+ self.info("Taskcluster taskId: %s" % self.task_id)
+ self.info("Routes: %s" % routes)
+ task = self.taskcluster_queue.createTask({
+ # The null-provisioner and buildbot worker type don't actually exist.
+ # So this task doesn't actually run - we just need to create the task so
+ # we have something to attach artifacts to.
+ "provisionerId": "null-provisioner",
+ "workerType": self.buildbot,
+ "created": curdate,
+ "deadline": curdate + timedelta(hours=1),
+ "routes": routes,
+ "payload": {
+ },
+ "extra": {
+ "index": {
+ "rank": self.rank,
+ },
+ },
+ "metadata": {
+ "name": "Buildbot/mozharness S3 uploader",
+ "description": "Upload outputs of buildbot/mozharness builds to S3",
+ "owner": "mshal@mozilla.com",
+ "source": "http://hg.mozilla.org/build/mozharness/",
+ }
+ }, taskId=self.task_id)
+ return task
+
+ def claim_task(self, task):
+ self.taskcluster_queue.claimTask(
+ task['status']['taskId'],
+ task['status']['runs'][-1]['runId'],
+ {
+ "workerGroup": self.buildbot,
+ "workerId": self.buildbot,
+ })
+
+ def get_task(self, task_id):
+ return self.taskcluster_queue.status(task_id)
+
+ @staticmethod
+ def get_mime_type(ext, default='application/octet-stream'):
+ mime_types = {
+ ".asc": "text/plain",
+ ".checksums": "text/plain",
+ ".json": "application/json",
+ ".log": "text/plain",
+ ".tar.bz2": "application/x-gtar",
+ ".txt": "text/plain",
+ ".xpi": "application/x-xpinstall",
+ ".zip": "application/zip",
+ }
+ return mime_types.get(ext, default)
+
+ @property
+ def expiration(self):
+ weeks = 52
+ if self.buildbot == 'buildbot-try':
+ weeks = 3
+ return datetime.utcnow() + timedelta(weeks=weeks)
+
+ def create_artifact(self, task, filename):
+ mime_type = self.get_mime_type(os.path.splitext(filename)[1])
+ content_length = os.path.getsize(filename)
+ self.info("Uploading to S3: filename=%s mimetype=%s length=%s" % (
+ filename, mime_type, content_length))
+ # reclaim the task to avoid "claim-expired" errors
+ self.taskcluster_queue.reclaimTask(
+ task['status']['taskId'], task['status']['runs'][-1]['runId'])
+ artifact = self.taskcluster_queue.createArtifact(
+ task['status']['taskId'],
+ task['status']['runs'][-1]['runId'],
+ 'public/build/%s' % os.path.basename(filename),
+ {
+ "storageType": "s3",
+ "expires": self.expiration,
+ "contentType": mime_type,
+ })
+ self.put_file(filename, artifact['putUrl'], mime_type)
+ return self.get_taskcluster_url(filename)
+
+ def create_reference_artifact(self, task, filename, url):
+ mime_type = self.get_mime_type(os.path.splitext(filename)[1])
+ self.info("Create reference artifact: filename=%s mimetype=%s url=%s" %
+ (filename, mime_type, url))
+ # reclaim the task to avoid "claim-expired" errors
+ self.taskcluster_queue.reclaimTask(
+ task['status']['taskId'], task['status']['runs'][-1]['runId'])
+ self.taskcluster_queue.createArtifact(
+ task['status']['taskId'],
+ task['status']['runs'][-1]['runId'],
+ 'public/build/%s' % os.path.basename(filename),
+ {
+ "storageType": "reference",
+ "expires": self.expiration,
+ "contentType": mime_type,
+ "url": url,
+ })
+
+ def report_completed(self, task):
+ task_id = task['status']['taskId']
+ run_id = task['status']['runs'][-1]['runId']
+ self.info("Resolving %s, run %s. Full task:" % (task_id, run_id))
+ self.info(str(task))
+ self.taskcluster_queue.reportCompleted(task_id, run_id)
+
+ def report_failed(self, task):
+ task_id = task['status']['taskId']
+ run_id = task['status']['runs'][-1]['runId']
+ self.info("Resolving %s as failed, run %s. Full task:" %
+ (task_id, run_id))
+ self.info(str(task))
+ self.taskcluster_queue.reportFailed(task_id, run_id)
+
+ def get_taskcluster_url(self, filename):
+ return 'https://queue.taskcluster.net/v1/task/%s/artifacts/public/build/%s' % (
+ self.task_id,
+ os.path.basename(filename)
+ )
+
+
+# TaskClusterArtifactFinderMixin {{{1
+class TaskClusterArtifactFinderMixin(object):
+    # This class assumes that you have extended from the base script class
+ QUEUE_URL = 'https://queue.taskcluster.net/v1/task/'
+ SCHEDULER_URL = 'https://scheduler.taskcluster.net/v1/task-graph/'
+
+ def get_task(self, task_id):
+ """ Get Task Definition """
+ # Signature: task(taskId) : result
+ return self.load_json_url(urljoin(self.QUEUE_URL, task_id))
+
+ def get_list_latest_artifacts(self, task_id):
+ """ Get Artifacts from Latest Run """
+ # Signature: listLatestArtifacts(taskId) : result
+
+ # Notice that this grabs the most recent run of a task since we don't
+        # know the run_id. This is slightly slower; however, it is more convenient.
+ return self.load_json_url(urljoin(self.QUEUE_URL, '{}/artifacts'.format(task_id)))
+
+ def url_to_artifact(self, task_id, full_path):
+ """ Return a URL for an artifact. """
+ return urljoin(self.QUEUE_URL, '{}/artifacts/{}'.format(task_id, full_path))
+
+ def get_inspect_graph(self, task_group_id):
+ """ Inspect Task Graph """
+ # Signature: inspect(taskGraphId) : result
+ return self.load_json_url(urljoin(self.SCHEDULER_URL, '{}/inspect'.format(task_group_id)))
+
+ def find_parent_task_id(self, task_id):
+ """ Returns the task_id of the parent task associated to the given task_id."""
+ # Find group id to associated to all related tasks
+ task_group_id = self.get_task(task_id)['taskGroupId']
+
+        # Find the child task and determine which task it depends on
+ for task in self.get_inspect_graph(task_group_id)['tasks']:
+ if task['taskId'] == task_id:
+ parent_task_id = task['requires'][0]
+
+ return parent_task_id
+
+ def set_bbb_artifacts(self, task_id, properties_file_path):
+ """ Find BBB artifacts through properties_file_path and set them. """
+ p = self.load_json_url(
+ self.url_to_artifact(task_id, properties_file_path))['properties']
+
+        # Set important artifacts for test jobs
+ self.set_artifacts(
+            p.get('packageUrl') or None,
+            p.get('testPackagesUrl') or None,
+            p.get('symbolsUrl') or None
+ )
+
+ def set_artifacts(self, installer, tests, symbols):
+ """ Sets installer, test and symbols URLs from the artifacts of BBB based task."""
+ self.installer_url, self.test_url, self.symbols_url = installer, tests, symbols
+ self.info('Set installer_url: %s' % self.installer_url)
+ self.info('Set test_url: %s' % self.test_url)
+ self.info('Set symbols_url: %s' % self.symbols_url)
+
+ def set_parent_artifacts(self, child_task_id):
+ """ Find and set installer_url, test_url and symbols_url by querying TaskCluster.
+
+        In Buildbot Bridge's normal behaviour we can find the artifacts by inspecting
+        a child's taskId, determining the task it depends on and finding the uploaded
+        artifacts there.
+
+        In order to support multi-tiered task graph scheduling for BBB triggered tasks,
+        we remove the assumption that the task we depend on is the one from which we
+        find the artifacts we need. Instead, we can set a parent_task_id which points
+        to the task from which to retrieve the artifacts. This decouples the task
+        dependency from the task from which to grab the artifacts.
+
+        In-tree triggered BBB tasks do not use parent_task_id; once there are efforts
+        to move the scheduling into the tree we can make parent_task_id the only method.
+
+ """
+ # Task definition
+ child_task = self.get_task(child_task_id)
+
+ # Case A: The parent_task_id is defined (mozci scheduling)
+ if child_task['payload']['properties'].get('parent_task_id'):
+ # parent_task_id is used to point to the task from which to grab artifacts
+ # rather than the one we depend on
+ parent_id = child_task['payload']['properties']['parent_task_id']
+
+ # Find out where the parent task uploaded the build
+ parent_task = self.get_task(parent_id)
+
+ # Case 1: The parent task is a pure TC task
+ if parent_task['extra'].get('locations'):
+ # Build tasks generated under TC specify where they upload their builds
+ installer_path = parent_task['extra']['locations']['build']
+
+ self.set_artifacts(
+ self.url_to_artifact(parent_id, installer_path),
+ self.url_to_artifact(parent_id, 'public/build/test_packages.json'),
+ self.url_to_artifact(parent_id, 'public/build/target.crashreporter-symbols.zip')
+ )
+ else:
+ # Case 2: The parent task has an associated BBB task
+ # graph_props.json is uploaded in buildbase.py
+ self.set_bbb_artifacts(
+ task_id=parent_id,
+ properties_file_path='public/build/buildbot_properties.json'
+ )
+
+ else:
+ # Case B: We need to query who the parent is since 'parent_task_id'
+ # was not defined as a Buildbot property
+ parent_id = self.find_parent_task_id(child_task_id)
+ self.set_bbb_artifacts(
+ task_id=parent_id,
+ properties_file_path='public/build/buildbot_properties.json'
+ )
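+
+# A minimal usage sketch (illustrative only; the class and config key below
+# are hypothetical): a test script that mixes this in alongside the base
+# script class could resolve its parent's artifact URLs like so:
+#
+#     class MyTestScript(TaskClusterArtifactFinderMixin, BaseScript):
+#         def find_artifacts(self):
+#             self.set_parent_artifacts(self.config['taskId'])
+#             # installer_url, test_url and symbols_url are now set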
diff --git a/testing/mozharness/mozharness/mozilla/testing/__init__.py b/testing/mozharness/mozharness/mozilla/testing/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/__init__.py
diff --git a/testing/mozharness/mozharness/mozilla/testing/codecoverage.py b/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
new file mode 100644
index 000000000..9cb824679
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+
+from mozharness.base.script import (
+ PreScriptAction,
+ PostScriptAction,
+)
+
+code_coverage_config_options = [
+ [["--code-coverage"],
+ {"action": "store_true",
+ "dest": "code_coverage",
+ "default": False,
+ "help": "Whether test run should package and upload code coverage data."
+ }],
+]
+
+
+class CodeCoverageMixin(object):
+ """
+ Mixin for setting GCOV_PREFIX during test execution, packaging up
+ the resulting .gcda files and uploading them to blobber.
+ """
+ gcov_dir = None
+
+ @property
+ def code_coverage_enabled(self):
+ try:
+ if self.config.get('code_coverage'):
+ return True
+
+ # XXX workaround because bug 1110465 is hard
+ return self.buildbot_config['properties']['stage_platform'] in ('linux64-ccov',)
+ except (AttributeError, KeyError, TypeError):
+ return False
+
+
+ @PreScriptAction('run-tests')
+ def _set_gcov_prefix(self, action):
+ if not self.code_coverage_enabled:
+ return
+ self.gcov_dir = tempfile.mkdtemp()
+ os.environ['GCOV_PREFIX'] = self.gcov_dir
+
+ @PostScriptAction('run-tests')
+ def _package_coverage_data(self, action, success=None):
+ if not self.code_coverage_enabled:
+ return
+ del os.environ['GCOV_PREFIX']
+
+ # TODO This is fragile, find rel_topsrcdir properly somehow
+ # We need to find the path relative to the gecko topsrcdir. Use
+ # some known gecko directories as a test.
+ canary_dirs = ['browser', 'docshell', 'dom', 'js', 'layout', 'toolkit', 'xpcom', 'xpfe']
+ rel_topsrcdir = None
+ for root, dirs, files in os.walk(self.gcov_dir):
+ # need to use 'any' in case no gcda data was generated in that subdir.
+ if any(d in dirs for d in canary_dirs):
+ rel_topsrcdir = root
+ break
+ else:
+ # Unable to upload code coverage files. Since this is the whole
+ # point of code coverage, making this fatal.
+ self.fatal("Could not find relative topsrcdir in code coverage "
+ "data!")
+
+ dirs = self.query_abs_dirs()
+ file_path = os.path.join(
+ dirs['abs_blob_upload_dir'], 'code-coverage-gcda.zip')
+ command = ['zip', '-r', file_path, '.']
+ self.run_command(command, cwd=rel_topsrcdir)
+ shutil.rmtree(self.gcov_dir)
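+
+# Illustrative sketch of the GCOV_PREFIX mechanism this mixin relies on
+# (paths below are hypothetical): with GCOV_PREFIX=/tmp/gcovXYZ, a
+# gcov-instrumented binary compiled from /builds/src/dom/foo.cpp writes its
+# counters to /tmp/gcovXYZ/builds/src/dom/foo.gcda, i.e. the original
+# absolute path is replicated under the prefix. That is why
+# _package_coverage_data walks the temporary directory looking for known
+# gecko source dirs to recover the path relative to the topsrcdir.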
diff --git a/testing/mozharness/mozharness/mozilla/testing/device.py b/testing/mozharness/mozharness/mozilla/testing/device.py
new file mode 100644
index 000000000..fea43ba20
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/device.py
@@ -0,0 +1,738 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+'''Interact with a device via ADB or SUT.
+
+This code is largely from
+https://hg.mozilla.org/build/tools/file/default/sut_tools
+'''
+
+import datetime
+import os
+import re
+import subprocess
+import sys
+import time
+
+from mozharness.base.errors import ADBErrorList
+from mozharness.base.log import LogMixin, DEBUG
+from mozharness.base.script import ScriptMixin
+
+
+# Device flags
+DEVICE_UNREACHABLE = 0x01
+DEVICE_NOT_CONNECTED = 0x02
+DEVICE_MISSING_SDCARD = 0x03
+DEVICE_HOST_ERROR = 0x04
+# DEVICE_UNRECOVERABLE_ERROR?
+DEVICE_NOT_REBOOTED = 0x05
+DEVICE_CANT_REMOVE_DEVROOT = 0x06
+DEVICE_CANT_REMOVE_ETC_HOSTS = 0x07
+DEVICE_CANT_SET_TIME = 0x08
+
+
+class DeviceException(Exception):
+ pass
+
+
+# BaseDeviceHandler {{{1
+class BaseDeviceHandler(ScriptMixin, LogMixin):
+ device_id = None
+ device_root = None
+ default_port = None
+ device_flags = []
+
+ def __init__(self, log_obj=None, config=None, script_obj=None):
+ super(BaseDeviceHandler, self).__init__()
+ self.config = config
+ self.log_obj = log_obj
+ self.script_obj = script_obj
+
+ def add_device_flag(self, flag):
+ if flag not in self.device_flags:
+ self.device_flags.append(flag)
+
+ def query_device_id(self):
+ if self.device_id:
+ return self.device_id
+ c = self.config
+ device_id = None
+ if c.get('device_id'):
+ device_id = c['device_id']
+ elif c.get('device_ip'):
+ device_id = "%s:%s" % (c['device_ip'],
+ c.get('device_port', self.default_port))
+ self.device_id = device_id
+ return self.device_id
+
+ def query_download_filename(self, file_id=None):
+ pass
+
+ def ping_device(self):
+ pass
+
+ def check_device(self):
+ pass
+
+ def cleanup_device(self, reboot=False):
+ pass
+
+ def reboot_device(self):
+ pass
+
+ def query_device_root(self):
+ pass
+
+ def wait_for_device(self, interval=60, max_attempts=20):
+ pass
+
+ def install_app(self, file_path):
+ pass
+
+
+# ADBDeviceHandler {{{1
+class ADBDeviceHandler(BaseDeviceHandler):
+ def __init__(self, **kwargs):
+ super(ADBDeviceHandler, self).__init__(**kwargs)
+ self.default_port = 5555
+
+ def query_device_exe(self, exe_name):
+ return self.query_exe(exe_name, exe_dict="device_exes")
+
+ def _query_config_device_id(self):
+ return BaseDeviceHandler.query_device_id(self)
+
+ def query_device_id(self, auto_connect=True):
+ if self.device_id:
+ return self.device_id
+ device_id = self._query_config_device_id()
+ if device_id:
+ if auto_connect:
+ self.ping_device(auto_connect=True)
+ else:
+ self.info("Trying to find device...")
+ devices = self._query_attached_devices()
+ if not devices:
+ self.add_device_flag(DEVICE_NOT_CONNECTED)
+ self.fatal("No device connected via adb!\nUse 'adb connect' or specify a device_id or device_ip in config!")
+ elif len(devices) > 1:
+ self.warning("""More than one device detected; specify 'device_id' or\n'device_ip' to target a specific device!""")
+ device_id = devices[0]
+ self.info("Found %s." % device_id)
+ self.device_id = device_id
+ return self.device_id
+
+ # maintenance {{{2
+ def ping_device(self, auto_connect=False, silent=False):
+ if auto_connect and not self._query_attached_devices():
+ self.connect_device()
+ if not silent:
+ self.info("Determining device connectivity over adb...")
+ device_id = self.query_device_id()
+ adb = self.query_exe('adb')
+ uptime = self.query_device_exe('uptime')
+ output = self.get_output_from_command([adb, "-s", device_id,
+ "shell", uptime],
+ silent=silent)
+ if str(output).startswith("up time:"):
+ if not silent:
+ self.info("Found %s." % device_id)
+ return True
+ elif auto_connect:
+ # TODO retry?
+ self.connect_device()
+ return self.ping_device()
+ else:
+ if not silent:
+ self.error("Can't find a device.")
+ return False
+
+ def _query_attached_devices(self):
+ devices = []
+ adb = self.query_exe('adb')
+ output = self.get_output_from_command([adb, "devices"])
+ starting_list = False
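+        # Typical 'adb devices' output looks like the following (the serial
+        # below is made up):
+        #
+        #     List of devices attached
+        #     0123456789ABCDEF    device
+        #
+        # so once past the header we take the first whitespace-separated
+        # token of each row as the device id.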
+ if output is None:
+ self.add_device_flag(DEVICE_HOST_ERROR)
+ self.fatal("Can't get output from 'adb devices'; install the Android SDK!")
+ for line in output:
+ if 'adb: command not found' in line:
+ self.add_device_flag(DEVICE_HOST_ERROR)
+ self.fatal("Can't find adb; install the Android SDK!")
+ if line.startswith("* daemon"):
+ continue
+ if line.startswith("List of devices"):
+ starting_list = True
+ continue
+ # TODO somehow otherwise determine whether this is an actual
+ # device?
+ if starting_list:
+                devices.append(re.split(r'\s+', line)[0])
+ return devices
+
+ def connect_device(self):
+ self.info("Connecting device...")
+ adb = self.query_exe('adb')
+ cmd = [adb, "connect"]
+ device_id = self._query_config_device_id()
+ if device_id:
+ devices = self._query_attached_devices()
+ if device_id in devices:
+ # TODO is this the right behavior?
+ self.disconnect_device()
+ cmd.append(device_id)
+ # TODO error check
+ self.run_command(cmd, error_list=ADBErrorList)
+
+ def disconnect_device(self):
+ self.info("Disconnecting device...")
+ device_id = self.query_device_id()
+ if device_id:
+ adb = self.query_exe('adb')
+ # TODO error check
+ self.run_command([adb, "-s", device_id,
+ "disconnect"],
+ error_list=ADBErrorList)
+ else:
+ self.info("No device found.")
+
+ def check_device(self):
+ if not self.ping_device(auto_connect=True):
+ self.add_device_flag(DEVICE_NOT_CONNECTED)
+ self.fatal("Can't find device!")
+ if self.query_device_root() is None:
+ self.add_device_flag(DEVICE_NOT_CONNECTED)
+ self.fatal("Can't connect to device!")
+
+ def reboot_device(self):
+ if not self.ping_device(auto_connect=True):
+ self.add_device_flag(DEVICE_NOT_REBOOTED)
+ self.error("Can't reboot disconnected device!")
+ return False
+ device_id = self.query_device_id()
+ self.info("Rebooting device...")
+ adb = self.query_exe('adb')
+ cmd = [adb, "-s", device_id, "reboot"]
+ self.info("Running command (in the background): %s" % cmd)
+ # This won't exit until much later, but we don't need to wait.
+ # However, some error checking would be good.
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ time.sleep(10)
+ self.disconnect_device()
+ status = False
+ try:
+ self.wait_for_device()
+ status = True
+ except DeviceException:
+ self.error("Can't reconnect to device!")
+ if p.poll() is None:
+ p.kill()
+ p.wait()
+ return status
+
+ def cleanup_device(self, reboot=False):
+ self.info("Cleaning up device.")
+ c = self.config
+ device_id = self.query_device_id()
+ status = self.remove_device_root()
+ if not status:
+ self.add_device_flag(DEVICE_CANT_REMOVE_DEVROOT)
+ self.fatal("Can't remove device root!")
+ if c.get("enable_automation"):
+ self.remove_etc_hosts()
+ if c.get("device_package_name"):
+ adb = self.query_exe('adb')
+ killall = self.query_device_exe('killall')
+ self.run_command([adb, "-s", device_id, "shell",
+ killall, c["device_package_name"]],
+ error_list=ADBErrorList)
+ self.uninstall_app(c['device_package_name'])
+ if reboot:
+ self.reboot_device()
+
+ # device calls {{{2
+ def query_device_root(self, silent=False):
+ if self.device_root:
+ return self.device_root
+ device_root = None
+ device_id = self.query_device_id()
+ adb = self.query_exe('adb')
+ output = self.get_output_from_command("%s -s %s shell df" % (adb, device_id),
+ silent=silent)
+ # TODO this assumes we're connected; error checking?
+ if output is None or ' not found' in str(output):
+ self.error("Can't get output from 'adb shell df'!\n%s" % output)
+ return None
+ if "/mnt/sdcard" in output:
+ device_root = "/mnt/sdcard/tests"
+ else:
+ device_root = "/data/local/tmp/tests"
+ if not silent:
+ self.info("Device root is %s" % str(device_root))
+ self.device_root = device_root
+ return self.device_root
+
+ # TODO from here on down needs to be copied to Base+SUT
+ def wait_for_device(self, interval=60, max_attempts=20):
+ self.info("Waiting for device to come back...")
+ time.sleep(interval)
+ tries = 0
+ while tries <= max_attempts:
+ tries += 1
+ self.info("Try %d" % tries)
+ if self.ping_device(auto_connect=True, silent=True):
+ return self.ping_device()
+ time.sleep(interval)
+ raise DeviceException("Remote Device Error: waiting for device timed out.")
+
+ def query_device_time(self):
+ device_id = self.query_device_id()
+ adb = self.query_exe('adb')
+ # adb shell 'date' will give a date string
+ date_string = self.get_output_from_command([adb, "-s", device_id,
+ "shell", "date"])
+ # TODO what to do when we error?
+ return date_string
+
+ def set_device_time(self, device_time=None, error_level='error'):
+ # adb shell date -s YYYYMMDD.hhmmss will set date
+ device_id = self.query_device_id()
+ if device_time is None:
+ device_time = time.strftime("%Y%m%d.%H%M%S")
+ self.info(self.query_device_time())
+ adb = self.query_exe('adb')
+ status = self.run_command([adb, "-s", device_id, "shell", "date", "-s",
+ str(device_time)],
+ error_list=ADBErrorList)
+ self.info(self.query_device_time())
+ return status
+
+ def query_device_file_exists(self, file_name):
+ device_id = self.query_device_id()
+ adb = self.query_exe('adb')
+ output = self.get_output_from_command([adb, "-s", device_id,
+ "shell", "ls", "-d", file_name])
+ if str(output).rstrip() == file_name:
+ return True
+ return False
+
+ def remove_device_root(self, error_level='error'):
+ device_root = self.query_device_root()
+ device_id = self.query_device_id()
+ if device_root is None:
+ self.add_device_flag(DEVICE_UNREACHABLE)
+ self.fatal("Can't connect to device!")
+ adb = self.query_exe('adb')
+ if self.query_device_file_exists(device_root):
+ self.info("Removing device root %s." % device_root)
+ self.run_command([adb, "-s", device_id, "shell", "rm",
+ "-r", device_root], error_list=ADBErrorList)
+ if self.query_device_file_exists(device_root):
+ self.add_device_flag(DEVICE_CANT_REMOVE_DEVROOT)
+ self.log("Unable to remove device root!", level=error_level)
+ return False
+ return True
+
+ def install_app(self, file_path):
+ c = self.config
+ device_id = self.query_device_id()
+ adb = self.query_exe('adb')
+ if self._log_level_at_least(DEBUG):
+ self.run_command([adb, "-s", device_id, "shell", "ps"],
+ error_list=ADBErrorList)
+ uptime = self.query_device_exe('uptime')
+ self.run_command([adb, "-s", "shell", uptime],
+ error_list=ADBErrorList)
+ if not c['enable_automation']:
+ # -s to install on sdcard? Needs to be config driven
+ self.run_command([adb, "-s", device_id, "install", '-r',
+ file_path],
+ error_list=ADBErrorList)
+ else:
+ # A slow-booting device may not allow installs, temporarily.
+ # Wait up to a few minutes if not immediately successful.
+ # Note that "adb install" typically writes status messages
+ # to stderr and the adb return code may not differentiate
+ # successful installations from failures; instead we check
+ # the command output.
+ install_complete = False
+ retries = 0
+ while retries < 6:
+ output = self.get_output_from_command([adb, "-s", device_id,
+ "install", '-r',
+ file_path],
+ ignore_errors=True)
+ if output and output.lower().find("success") >= 0:
+ install_complete = True
+ break
+ self.warning("Failed to install %s" % file_path)
+ time.sleep(30)
+ retries = retries + 1
+ if not install_complete:
+ self.fatal("Failed to install %s!" % file_path)
+
+ def uninstall_app(self, package_name, package_root="/data/data",
+ error_level="error"):
+ c = self.config
+ device_id = self.query_device_id()
+ self.info("Uninstalling %s..." % package_name)
+ if self.query_device_file_exists('%s/%s' % (package_root, package_name)):
+ adb = self.query_exe('adb')
+ cmd = [adb, "-s", device_id, "uninstall"]
+ if not c.get('enable_automation'):
+ cmd.append("-k")
+ cmd.append(package_name)
+ status = self.run_command(cmd, error_list=ADBErrorList)
+ # TODO is this the right error check?
+ if status:
+ self.log("Failed to uninstall %s!" % package_name,
+ level=error_level)
+
+ # Device-type-specific. {{{2
+ def remove_etc_hosts(self, hosts_file="/system/etc/hosts"):
+ c = self.config
+ if c['device_type'] not in ("tegra250",):
+ self.debug("No need to remove /etc/hosts on a non-Tegra250.")
+ return
+ device_id = self.query_device_id()
+ if self.query_device_file_exists(hosts_file):
+ self.info("Removing %s file." % hosts_file)
+ adb = self.query_exe('adb')
+ self.run_command([adb, "-s", device_id, "shell",
+ "mount", "-o", "remount,rw", "-t", "yaffs2",
+ "/dev/block/mtdblock3", "/system"],
+ error_list=ADBErrorList)
+ self.run_command([adb, "-s", device_id, "shell", "rm",
+ hosts_file])
+ if self.query_device_file_exists(hosts_file):
+ self.add_device_flag(DEVICE_CANT_REMOVE_ETC_HOSTS)
+ self.fatal("Unable to remove %s!" % hosts_file)
+ else:
+ self.debug("%s file doesn't exist; skipping." % hosts_file)
+
+
+# SUTDeviceHandler {{{1
+class SUTDeviceHandler(BaseDeviceHandler):
+ def __init__(self, **kwargs):
+ super(SUTDeviceHandler, self).__init__(**kwargs)
+ self.devicemanager = None
+ self.default_port = 20701
+ self.default_heartbeat_port = 20700
+ self.DMError = None
+
+ def query_devicemanager(self):
+ if self.devicemanager:
+ return self.devicemanager
+ c = self.config
+ site_packages_path = self.script_obj.query_python_site_packages_path()
+ dm_path = os.path.join(site_packages_path, 'mozdevice')
+ sys.path.append(dm_path)
+ try:
+ from devicemanagerSUT import DeviceManagerSUT
+ from devicemanagerSUT import DMError
+ self.DMError = DMError
+ self.devicemanager = DeviceManagerSUT(c['device_ip'])
+ # TODO configurable?
+ self.devicemanager.debug = c.get('devicemanager_debug_level', 0)
+        except ImportError as e:
+ self.fatal("Can't import DeviceManagerSUT! %s\nDid you check out talos?" % str(e))
+ return self.devicemanager
+
+ # maintenance {{{2
+ def ping_device(self):
+        # TODO writeme
+ pass
+
+ def check_device(self):
+ self.info("Checking for device root to verify the device is alive.")
+ dev_root = self.query_device_root(strict=True)
+ if not dev_root:
+ self.add_device_flag(DEVICE_UNREACHABLE)
+ self.fatal("Can't get dev_root from devicemanager; is the device up?")
+ self.info("Found a dev_root of %s." % str(dev_root))
+
+ def wait_for_device(self, interval=60, max_attempts=20):
+ self.info("Waiting for device to come back...")
+ time.sleep(interval)
+ success = False
+ attempts = 0
+ while attempts <= max_attempts:
+ attempts += 1
+ self.info("Try %d" % attempts)
+ if self.query_device_root() is not None:
+ success = True
+ break
+ time.sleep(interval)
+ if not success:
+ self.add_device_flag(DEVICE_UNREACHABLE)
+ self.fatal("Waiting for tegra timed out.")
+ else:
+ self.info("Device came back.")
+
+ def cleanup_device(self, reboot=False):
+ c = self.config
+ dev_root = self.query_device_root()
+ dm = self.query_devicemanager()
+ if dm.dirExists(dev_root):
+ self.info("Removing dev_root %s..." % dev_root)
+ try:
+ dm.removeDir(dev_root)
+ except self.DMError:
+ self.add_device_flag(DEVICE_CANT_REMOVE_DEVROOT)
+ self.fatal("Can't remove dev_root!")
+ if c.get("enable_automation"):
+ self.remove_etc_hosts()
+ # TODO I need to abstract this uninstall as we'll need to clean
+ # multiple packages off devices.
+ if c.get("device_package_name"):
+ if dm.dirExists('/data/data/%s' % c['device_package_name']):
+ self.info("Uninstalling %s..." % c['device_package_name'])
+ dm.uninstallAppAndReboot(c['device_package_name'])
+ self.wait_for_device()
+ elif reboot:
+ self.reboot_device()
+
+ # device calls {{{2
+ def query_device_root(self, strict=False):
+ c = self.config
+ dm = self.query_devicemanager()
+ dev_root = dm.getDeviceRoot()
+ if strict and c.get('enable_automation'):
+ if not str(dev_root).startswith("/mnt/sdcard"):
+ self.add_device_flag(DEVICE_MISSING_SDCARD)
+ self.fatal("dev_root from devicemanager [%s] is not correct!" %
+ str(dev_root))
+ if not dev_root or dev_root == "/tests":
+ return None
+ return dev_root
+
+ def query_device_time(self):
+ dm = self.query_devicemanager()
+ timestamp = int(dm.getCurrentTime()) # epoch time in milliseconds
+ dt = datetime.datetime.utcfromtimestamp(timestamp / 1000)
+ self.info("Current device time is %s" % dt.strftime('%Y/%m/%d %H:%M:%S'))
+ return dt
+
+ def set_device_time(self):
+ dm = self.query_devicemanager()
+ s = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
+ self.info("Setting device time to %s" % s)
+ try:
+ dm.sendCMD(['settime %s' % s])
+ return True
+        except self.DMError as e:
+ self.add_device_flag(DEVICE_CANT_SET_TIME)
+ self.fatal("Exception while setting device time: %s" % str(e))
+
+ def install_app(self, file_path):
+ dev_root = self.query_device_root(strict=True)
+ if not dev_root:
+ self.add_device_flag(DEVICE_UNREACHABLE)
+ # TODO wait_for_device?
+ self.fatal("dev_root %s not correct!" % str(dev_root))
+
+ dm = self.query_devicemanager()
+
+ c = self.config
+ if c.get('enable_automation'):
+ self.query_device_time()
+ self.set_device_time()
+ self.query_device_time()
+ dm.getInfo('process')
+ dm.getInfo('memory')
+ dm.getInfo('uptime')
+
+ # This target needs to not use os.path.join due to differences with win
+ # Paths vs. unix paths.
+ target = "/".join([dev_root, os.path.basename(file_path)])
+ self.info("Installing %s on device..." % file_path)
+ dm.pushFile(file_path, target)
+ # TODO screen resolution
+ # TODO do something with status?
+ try:
+ dm.installApp(target)
+ self.info('-' * 42)
+ self.info("Sleeping for 90 seconds...")
+ time.sleep(90)
+ self.info('installApp(%s) done - gathering debug info' % target)
+ try:
+ self.info(repr(dm.getInfo('process')))
+ self.info(repr(dm.getInfo('memory')))
+ self.info(repr(dm.getInfo('uptime')))
+ self.info(repr(dm.sendCMD(['exec su -c "logcat -d -v time *:W"'])))
+            except Exception as e:
+ self.info("Exception hit while trying to run logcat: %s" % str(e))
+ self.fatal("Remote Device Error: can't run logcat")
+ except self.DMError:
+ self.fatal("Remote Device Error: installApp() call failed - exiting")
+
+ def reboot_device(self):
+ dm = self.query_devicemanager()
+ # logcat?
+ self.info("Rebooting device...")
+ try:
+ dm.reboot()
+ except self.DMError:
+ self.add_device_flag(DEVICE_NOT_REBOOTED)
+ self.fatal("Can't reboot device!")
+ self.wait_for_device()
+ dm.getInfo('uptime')
+
+ # device type specific {{{2
+ def remove_etc_hosts(self, hosts_file="/system/etc/hosts"):
+ c = self.config
+ # TODO figure this out
+ if c['device_type'] not in ("tegra250",) or True:
+ self.debug("No need to remove /etc/hosts on a non-Tegra250.")
+ return
+ dm = self.query_devicemanager()
+ if dm.fileExists(hosts_file):
+ self.info("Removing %s file." % hosts_file)
+ try:
+ dm.sendCMD(['exec mount -o remount,rw -t yaffs2 /dev/block/mtdblock3 /system'])
+ dm.sendCMD(['exec rm %s' % hosts_file])
+ except self.DMError:
+ self.add_device_flag(DEVICE_CANT_REMOVE_ETC_HOSTS)
+ self.fatal("Unable to remove %s!" % hosts_file)
+ if dm.fileExists(hosts_file):
+ self.add_device_flag(DEVICE_CANT_REMOVE_ETC_HOSTS)
+ self.fatal("Unable to remove %s!" % hosts_file)
+ else:
+ self.debug("%s file doesn't exist; skipping." % hosts_file)
+
+
+# SUTDeviceMozdeviceMixin {{{1
+class SUTDeviceMozdeviceMixin(SUTDeviceHandler):
+ '''
+ This SUT device manager class makes calls through mozdevice (from mozbase) [1]
+ directly rather than calling SUT tools.
+
+ [1] https://github.com/mozilla/mozbase/blob/master/mozdevice/mozdevice/devicemanagerSUT.py
+ '''
+ dm = None
+
+ def query_devicemanager(self):
+ if self.dm:
+ return self.dm
+ sys.path.append(self.query_python_site_packages_path())
+ from mozdevice.devicemanagerSUT import DeviceManagerSUT
+ self.info("Connecting to: %s" % self.mozpool_device)
+ self.dm = DeviceManagerSUT(self.mozpool_device)
+ # No need for 300 second SUT socket timeouts here
+ self.dm.default_timeout = 30
+ return self.dm
+
+ def query_file(self, filename):
+ dm = self.query_devicemanager()
+ if not dm.fileExists(filename):
+ raise Exception("Expected file (%s) not found" % filename)
+
+ file_contents = dm.pullFile(filename)
+ if file_contents is None:
+ raise Exception("Unable to read file (%s)" % filename)
+
+ return file_contents
+
+    def set_device_epoch_time(self, timestamp=None):
+        # Compute the default at call time; a default argument of
+        # int(time.time()) would be evaluated once, at definition time.
+        if timestamp is None:
+            timestamp = int(time.time())
+        dm = self.query_devicemanager()
+ dm._runCmds([{'cmd': 'setutime %s' % timestamp}])
+ return dm._runCmds([{'cmd': 'clok'}])
+
+ def get_logcat(self):
+ dm = self.query_devicemanager()
+ return dm.getLogcat()
+
+
+# DeviceMixin {{{1
+DEVICE_PROTOCOL_DICT = {
+ 'adb': ADBDeviceHandler,
+ 'sut': SUTDeviceHandler,
+}
+
+device_config_options = [[
+ ["--device-ip"],
+ {"action": "store",
+ "dest": "device_ip",
+ "help": "Specify the IP address of the device."
+ }
+], [
+ ["--device-port"],
+ {"action": "store",
+ "dest": "device_port",
+ "help": "Specify the IP port of the device."
+ }
+], [
+ ["--device-heartbeat-port"],
+ {"action": "store",
+ "dest": "device_heartbeat_port",
+ "help": "Specify the heartbeat port of the SUT device."
+ }
+], [
+ ["--device-protocol"],
+ {"action": "store",
+ "type": "choice",
+ "dest": "device_protocol",
+ "choices": DEVICE_PROTOCOL_DICT.keys(),
+ "help": "Specify the device communication protocol."
+ }
+], [
+ ["--device-type"],
+ # A bit useless atm, but we can add new device types as we add support
+ # for them.
+ {"action": "store",
+ "type": "choice",
+ "choices": ["non-tegra", "tegra250"],
+ "default": "non-tegra",
+ "dest": "device_type",
+ "help": "Specify the device type."
+ }
+], [
+ ["--devicemanager-path"],
+ {"action": "store",
+ "dest": "devicemanager_path",
+ "help": "Specify the parent dir of devicemanagerSUT.py."
+ }
+]]
+
+
+class DeviceMixin(object):
+ '''BaseScript mixin, designed to interface with the device.
+
+ '''
+ device_handler = None
+ device_root = None
+
+ def query_device_handler(self):
+ if self.device_handler:
+ return self.device_handler
+ c = self.config
+ device_protocol = c.get('device_protocol')
+ device_class = DEVICE_PROTOCOL_DICT.get(device_protocol)
+ if not device_class:
+ self.fatal("Unknown device_protocol %s; set via --device-protocol!" % str(device_protocol))
+ self.device_handler = device_class(
+ log_obj=self.log_obj,
+ config=self.config,
+ script_obj=self,
+ )
+ return self.device_handler
+
+ def check_device(self):
+ dh = self.query_device_handler()
+ return dh.check_device()
+
+ def cleanup_device(self, **kwargs):
+ dh = self.query_device_handler()
+ return dh.cleanup_device(**kwargs)
+
+ def install_app(self):
+ dh = self.query_device_handler()
+ return dh.install_app(file_path=self.installer_path)
+
+ def reboot_device(self):
+ dh = self.query_device_handler()
+ return dh.reboot_device()
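+
+# A minimal configuration sketch (values below are hypothetical): a script
+# mixing in DeviceMixin selects its handler via config, e.g.
+#
+#     config = {
+#         'device_protocol': 'adb',   # or 'sut'
+#         'device_ip': '10.0.0.42',   # used by query_device_id()
+#         'device_type': 'non-tegra',
+#     }
+#
+# query_device_handler() then instantiates ADBDeviceHandler (or
+# SUTDeviceHandler) with the script's log and config objects.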
diff --git a/testing/mozharness/mozharness/mozilla/testing/errors.py b/testing/mozharness/mozharness/mozilla/testing/errors.py
new file mode 100644
index 000000000..3937b28c4
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/errors.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""Mozilla error lists for running tests.
+
+Error lists are used to parse output in mozharness.base.log.OutputParser.
+
+Each line of output is matched against each substring or regular expression
+in the error list. On a match, we determine the 'level' of that line,
+whether IGNORE, DEBUG, INFO, WARNING, ERROR, CRITICAL, or FATAL.
+
+"""
+
+import re
+from mozharness.base.log import INFO, WARNING, ERROR
+
+# ErrorLists {{{1
+_mochitest_summary = {
+ 'regex': re.compile(r'''(\d+ INFO (Passed|Failed|Todo):\ +(\d+)|\t(Passed|Failed|Todo): (\d+))'''),
+ 'pass_group': "Passed",
+ 'fail_group': "Failed",
+ 'known_fail_group': "Todo",
+}
+
+TinderBoxPrintRe = {
+ "mochitest_summary": _mochitest_summary,
+ "mochitest-chrome_summary": _mochitest_summary,
+ "mochitest-gl_summary": _mochitest_summary,
+ "mochitest-media_summary": _mochitest_summary,
+ "mochitest-plain-clipboard_summary": _mochitest_summary,
+ "mochitest-plain-gpu_summary": _mochitest_summary,
+ "marionette_summary": {
+ 'regex': re.compile(r'''(passed|failed|todo):\ +(\d+)'''),
+ 'pass_group': "passed",
+ 'fail_group': "failed",
+ 'known_fail_group': "todo",
+ },
+ "reftest_summary": {
+ 'regex': re.compile(r'''REFTEST INFO \| (Successful|Unexpected|Known problems): (\d+) \('''),
+ 'pass_group': "Successful",
+ 'fail_group': "Unexpected",
+ 'known_fail_group': "Known problems",
+ },
+ "crashtest_summary": {
+ 'regex': re.compile(r'''REFTEST INFO \| (Successful|Unexpected|Known problems): (\d+) \('''),
+ 'pass_group': "Successful",
+ 'fail_group': "Unexpected",
+ 'known_fail_group': "Known problems",
+ },
+ "xpcshell_summary": {
+ 'regex': re.compile(r'''INFO \| (Passed|Failed): (\d+)'''),
+ 'pass_group': "Passed",
+ 'fail_group': "Failed",
+ 'known_fail_group': None,
+ },
+ "jsreftest_summary": {
+ 'regex': re.compile(r'''REFTEST INFO \| (Successful|Unexpected|Known problems): (\d+) \('''),
+ 'pass_group': "Successful",
+ 'fail_group': "Unexpected",
+ 'known_fail_group': "Known problems",
+ },
+ "robocop_summary": _mochitest_summary,
+ "instrumentation_summary": _mochitest_summary,
+ "cppunittest_summary": {
+ 'regex': re.compile(r'''cppunittests INFO \| (Passed|Failed): (\d+)'''),
+ 'pass_group': "Passed",
+ 'fail_group': "Failed",
+ 'known_fail_group': None,
+ },
+ "gtest_summary": {
+ 'regex': re.compile(r'''(Passed|Failed): (\d+)'''),
+ 'pass_group': "Passed",
+ 'fail_group': "Failed",
+ 'known_fail_group': None,
+ },
+ "jittest_summary": {
+ 'regex': re.compile(r'''(Passed|Failed): (\d+)'''),
+ 'pass_group': "Passed",
+ 'fail_group': "Failed",
+ 'known_fail_group': None,
+ },
+ "mozbase_summary": {
+ 'regex': re.compile(r'''(OK)|(FAILED) \(errors=(\d+)'''),
+ 'pass_group': "OK",
+ 'fail_group': "FAILED",
+ 'known_fail_group': None,
+ },
+ "mozmill_summary": {
+ 'regex': re.compile(r'''INFO (Passed|Failed|Skipped): (\d+)'''),
+ 'pass_group': "Passed",
+ 'fail_group': "Failed",
+ 'known_fail_group': "Skipped",
+ },
+
+ "harness_error": {
+ 'full_regex': re.compile(r"(?:TEST-UNEXPECTED-FAIL|PROCESS-CRASH) \| .* \| (application crashed|missing output line for total leaks!|negative leaks caught!|\d+ bytes leaked)"),
+ 'minimum_regex': re.compile(r'''(TEST-UNEXPECTED|PROCESS-CRASH)'''),
+ 'retry_regex': re.compile(r'''(FAIL-SHOULD-RETRY|No space left on device|DMError|Connection to the other side was lost in a non-clean fashion|program finished with exit code 80|INFRA-ERROR|twisted.spread.pb.PBConnectionLost|_dl_open: Assertion|Timeout exceeded for _runCmd call)''')
+ },
+}
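+
+# Illustrative match (the log line is made up): against a mochitest summary
+# line such as '1438 INFO Passed:  1389', _mochitest_summary's regex captures
+# the group name 'Passed' and the count '1389', which the output parser then
+# buckets via pass_group/fail_group/known_fail_group.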
+
+TestPassed = [
+ {'regex': re.compile('''(TEST-INFO|TEST-KNOWN-FAIL|TEST-PASS|INFO \| )'''), 'level': INFO},
+]
+
+HarnessErrorList = [
+ {'substr': 'TEST-UNEXPECTED', 'level': ERROR, },
+ {'substr': 'PROCESS-CRASH', 'level': ERROR, },
+]
+
+LogcatErrorList = [
+ {'substr': 'Fatal signal 11 (SIGSEGV)', 'level': ERROR, 'explanation': 'This usually indicates the B2G process has crashed'},
+ {'substr': 'Fatal signal 7 (SIGBUS)', 'level': ERROR, 'explanation': 'This usually indicates the B2G process has crashed'},
+ {'substr': '[JavaScript Error:', 'level': WARNING},
+ {'substr': 'seccomp sandbox violation', 'level': ERROR, 'explanation': 'A content process has violated the system call sandbox (bug 790923)'},
+]
diff --git a/testing/mozharness/mozharness/mozilla/testing/firefox_media_tests.py b/testing/mozharness/mozharness/mozilla/testing/firefox_media_tests.py
new file mode 100644
index 000000000..b1874fc13
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/firefox_media_tests.py
@@ -0,0 +1,289 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** BEGIN LICENSE BLOCK *****
+
+import copy
+import os
+import re
+import urlparse
+
+from mozharness.base.log import ERROR, WARNING
+from mozharness.base.script import PreScriptAction
+from mozharness.mozilla.testing.testbase import (TestingMixin,
+ testing_config_options)
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+from mozharness.mozilla.vcstools import VCSToolsScript
+
+BUSTED = 'busted'
+TESTFAILED = 'testfailed'
+UNKNOWN = 'unknown'
+EXCEPTION = 'exception'
+SUCCESS = 'success'
+
+media_test_config_options = [
+ [["--media-urls"],
+ {"action": "store",
+ "dest": "media_urls",
+ "help": "Path to ini file that lists media urls for tests.",
+ }],
+ [["--profile"],
+ {"action": "store",
+ "dest": "profile",
+ "default": None,
+ "help": "Path to FF profile that should be used by Marionette",
+ }],
+ [["--test-timeout"],
+ {"action": "store",
+ "dest": "test_timeout",
+ "default": 10000,
+ "help": ("Number of seconds without output before"
+ "firefox-media-tests is killed."
+ "Set this based on expected time for all media to play."),
+ }],
+ [["--tests"],
+ {"action": "store",
+ "dest": "tests",
+ "default": None,
+ "help": ("Test(s) to run. Path to test_*.py or "
+ "test manifest (*.ini)"),
+ }],
+ [["--e10s"],
+ {"dest": "e10s",
+ "action": "store_true",
+ "default": False,
+ "help": "Enable e10s when running marionette tests."
+ }],
+ [["--suite"],
+ {"action": "store",
+ "dest": "test_suite",
+ "default": "media-tests",
+ "help": "suite name",
+ }],
+ [['--browsermob-script'],
+ {'help': 'path to the browsermob-proxy shell script or batch file',
+ }],
+ [['--browsermob-port'],
+ {'help': 'port to run the browsermob proxy on',
+ }],
+ [["--allow-software-gl-layers"],
+ {"action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."
+ }],
+] + copy.deepcopy(testing_config_options)
+
+
+class JobResultParser(TestSummaryOutputParserHelper):
+ """ Parses test output to determine overall result."""
+ def __init__(self, **kwargs):
+ super(JobResultParser, self).__init__(**kwargs)
+ self.return_code = 0
+ # External-resource errors that should not count as test failures
+ self.exception_re = re.compile(r'^TEST-UNEXPECTED-ERROR.*'
+ r'TimeoutException: Error loading page,'
+ r' timed out')
+ self.exceptions = []
+
+ def parse_single_line(self, line):
+ super(JobResultParser, self).parse_single_line(line)
+ if self.exception_re.match(line):
+ self.exceptions.append(line)
+
+ @property
+ def status(self):
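+        # Precedence, in brief: passes with no failures means SUCCESS; known
+        # external-resource exceptions outrank plain test failures; and a
+        # nonzero return code with nothing else reported means BUSTED.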
+ status = UNKNOWN
+ if self.passed and self.failed == 0:
+ status = SUCCESS
+ elif self.exceptions:
+ status = EXCEPTION
+ elif self.failed:
+ status = TESTFAILED
+ elif self.return_code:
+ status = BUSTED
+ return status
+
+
+class FirefoxMediaTestsBase(TestingMixin, VCSToolsScript):
+ job_result_parser = None
+
+ error_list = [
+ {'substr': 'FAILED (errors=', 'level': WARNING},
+ {'substr': r'''Could not successfully complete transport of message to Gecko, socket closed''', 'level': ERROR},
+ {'substr': r'''Connection to Marionette server is lost. Check gecko''', 'level': ERROR},
+ {'substr': 'Timeout waiting for marionette on port', 'level': ERROR},
+ {'regex': re.compile(r'''(TEST-UNEXPECTED|PROCESS-CRASH|CRASH|ERROR|FAIL)'''), 'level': ERROR},
+ {'regex': re.compile(r'''(\b\w*Exception)'''), 'level': ERROR},
+ {'regex': re.compile(r'''(\b\w*Error)'''), 'level': ERROR},
+ ]
+
+ def __init__(self, config_options=None, all_actions=None,
+ default_actions=None, **kwargs):
+ self.config_options = media_test_config_options + (config_options or [])
+ actions = [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ]
+ super(FirefoxMediaTestsBase, self).__init__(
+ config_options=self.config_options,
+ all_actions=all_actions or actions,
+ default_actions=default_actions or actions,
+ **kwargs
+ )
+ c = self.config
+
+ self.media_urls = c.get('media_urls')
+ self.profile = c.get('profile')
+ self.test_timeout = int(c.get('test_timeout'))
+ self.tests = c.get('tests')
+ self.e10s = c.get('e10s')
+ self.installer_url = c.get('installer_url')
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.test_packages_url = c.get('test_packages_url')
+ self.test_url = c.get('test_url')
+ self.browsermob_script = c.get('browsermob_script')
+ self.browsermob_port = c.get('browsermob_port')
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+
+ media_tests_requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'external-media-tests-requirements.txt')
+
+ if os.access(media_tests_requirements, os.F_OK):
+ self.register_virtualenv_module(requirements=[media_tests_requirements],
+ two_pass=True)
+
+ def download_and_extract(self):
+ """Overriding method from TestingMixin for more specific behavior.
+
+ We use the test_packages_url command line argument to check where to get the
+ harness, puppeteer, and tests from and how to set them up.
+
+ """
+ extract_dirs = ['config/*',
+ 'external-media-tests/*',
+ 'marionette/*',
+ 'mozbase/*',
+ 'puppeteer/*',
+ 'tools/wptserve/*',
+ ]
+ super(FirefoxMediaTestsBase, self).download_and_extract(extract_dirs=extract_dirs)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(FirefoxMediaTestsBase, self).query_abs_dirs()
+ dirs = {
+            'abs_test_install_dir': os.path.join(abs_dirs['abs_work_dir'],
+                                                 'tests')
+ }
+ dirs['external-media-tests'] = os.path.join(dirs['abs_test_install_dir'],
+ 'external-media-tests')
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def _query_cmd(self):
+ """ Determine how to call firefox-media-tests """
+ if not self.binary_path:
+ self.fatal("Binary path could not be determined. "
+ "Should be set by default during 'install' action.")
+ dirs = self.query_abs_dirs()
+
+ import external_media_harness.runtests
+
+ cmd = [
+ self.query_python_path(),
+ external_media_harness.runtests.__file__
+ ]
+
+ cmd += ['--binary', self.binary_path]
+ if self.symbols_path:
+ cmd += ['--symbols-path', self.symbols_path]
+ if self.media_urls:
+ cmd += ['--urls', self.media_urls]
+ if self.profile:
+ cmd += ['--profile', self.profile]
+ if self.tests:
+ cmd.append(self.tests)
+ if not self.e10s:
+ cmd.append('--disable-e10s')
+ if self.browsermob_script:
+ cmd += ['--browsermob-script', self.browsermob_script]
+ if self.browsermob_port:
+ cmd += ['--browsermob-port', self.browsermob_port]
+
+ test_suite = self.config.get('test_suite')
+ if test_suite not in self.config["suite_definitions"]:
+ self.fatal("%s is not defined in the config!" % test_suite)
+
+ test_manifest = None if test_suite != 'media-youtube-tests' else \
+ os.path.join(dirs['external-media-tests'],
+ 'external_media_tests',
+ 'playback', 'youtube', 'manifest.ini')
+ config_fmt_args = {
+ 'test_manifest': test_manifest,
+ }
+
+ for s in self.config["suite_definitions"][test_suite]["options"]:
+ cmd.append(s % config_fmt_args)
+
+ return cmd
+
+ def query_minidump_stackwalk(self):
+ """We don't have an extracted test package available to get the manifest file.
+
+        So we have to explicitly download the latest version of the manifest from the
+ mozilla-central repository and feed it into the query_minidump_stackwalk() method.
+
+ We can remove this whole method once our tests are part of the tree.
+
+ """
+ manifest_path = None
+
+ if os.environ.get('MINIDUMP_STACKWALK') or self.config.get('download_minidump_stackwalk'):
+ tooltool_manifest = self.query_minidump_tooltool_manifest()
+ url_base = 'https://hg.mozilla.org/mozilla-central/raw-file/default/testing/'
+
+ dirs = self.query_abs_dirs()
+ manifest_path = os.path.join(dirs['abs_work_dir'], 'releng.manifest')
+ try:
+ self.download_file(urlparse.urljoin(url_base, tooltool_manifest),
+ manifest_path)
+ except Exception as e:
+ self.fatal('Download of tooltool manifest file failed: %s' % e.message)
+
+ return super(FirefoxMediaTestsBase, self).query_minidump_stackwalk(manifest=manifest_path)
+
+ def run_media_tests(self):
+ cmd = self._query_cmd()
+ self.job_result_parser = JobResultParser(
+ config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list
+ )
+
+ env = self.query_env()
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ return_code = self.run_command(
+ cmd,
+ output_timeout=self.test_timeout,
+ output_parser=self.job_result_parser,
+ env=env
+ )
+ self.job_result_parser.return_code = return_code
+ return self.job_result_parser.status
diff --git a/testing/mozharness/mozharness/mozilla/testing/firefox_ui_tests.py b/testing/mozharness/mozharness/mozilla/testing/firefox_ui_tests.py
new file mode 100644
index 000000000..684ec3a73
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/firefox_ui_tests.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import copy
+import os
+import sys
+
+from mozharness.base.log import FATAL, WARNING
+from mozharness.base.python import PostScriptRun, PreScriptAction
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.mozilla.testing.testbase import (
+ TestingMixin,
+ testing_config_options,
+)
+from mozharness.mozilla.vcstools import VCSToolsScript
+
+
+# General command line arguments for Firefox ui tests
+firefox_ui_tests_config_options = [
+ [["--allow-software-gl-layers"], {
+ "action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor.",
+ }],
+ [['--dry-run'], {
+ 'dest': 'dry_run',
+ 'default': False,
+        'help': 'Only show what would be tested, without running anything.',
+ }],
+ [["--e10s"], {
+ 'dest': 'e10s',
+ 'action': 'store_true',
+ 'default': False,
+ 'help': 'Enable multi-process (e10s) mode when running tests.',
+ }],
+    [['--symbols-path'], {
+ 'dest': 'symbols_path',
+ 'help': 'absolute path to directory containing breakpad '
+ 'symbols, or the url of a zip file containing symbols.',
+ }],
+    [['--tag'], {
+ 'dest': 'tag',
+ 'help': 'Subset of tests to run (local, remote).',
+ }],
+] + copy.deepcopy(testing_config_options)
+
+# Command line arguments for update tests
+firefox_ui_update_harness_config_options = [
+ [['--update-allow-mar-channel'], {
+ 'dest': 'update_allow_mar_channel',
+ 'help': 'Additional MAR channel to be allowed for updates, e.g. '
+ '"firefox-mozilla-beta" for updating a release build to '
+ 'the latest beta build.',
+ }],
+ [['--update-channel'], {
+ 'dest': 'update_channel',
+ 'help': 'Update channel to use.',
+ }],
+ [['--update-direct-only'], {
+ 'action': 'store_true',
+ 'dest': 'update_direct_only',
+ 'help': 'Only perform a direct update.',
+ }],
+ [['--update-fallback-only'], {
+ 'action': 'store_true',
+ 'dest': 'update_fallback_only',
+ 'help': 'Only perform a fallback update.',
+ }],
+ [['--update-override-url'], {
+ 'dest': 'update_override_url',
+ 'help': 'Force specified URL to use for update checks.',
+ }],
+ [['--update-target-buildid'], {
+ 'dest': 'update_target_buildid',
+ 'help': 'Build ID of the updated build',
+ }],
+ [['--update-target-version'], {
+ 'dest': 'update_target_version',
+ 'help': 'Version of the updated build.',
+ }],
+]
+
+firefox_ui_update_config_options = firefox_ui_update_harness_config_options \
+ + copy.deepcopy(firefox_ui_tests_config_options)
+
+
+class FirefoxUITests(TestingMixin, VCSToolsScript):
+
+    # Needs to be overridden in subclasses
+ cli_script = None
+
+ def __init__(self, config_options=None,
+ all_actions=None, default_actions=None,
+ *args, **kwargs):
+ config_options = config_options or firefox_ui_tests_config_options
+ actions = [
+ 'clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ 'uninstall',
+ ]
+
+ super(FirefoxUITests, self).__init__(
+ config_options=config_options,
+ all_actions=all_actions or actions,
+ default_actions=default_actions or actions,
+ *args, **kwargs)
+
+        # Code that doesn't run on buildbot has to set the following properties
+ self.binary_path = self.config.get('binary_path')
+ self.installer_path = self.config.get('installer_path')
+ self.installer_url = self.config.get('installer_url')
+ self.test_packages_url = self.config.get('test_packages_url')
+ self.test_url = self.config.get('test_url')
+
+ if not self.test_url and not self.test_packages_url:
+ self.fatal(
+                'You must use --test-url or --test-packages-url')
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config', 'firefox_ui_requirements.txt')
+ self.register_virtualenv_module(requirements=[requirements], two_pass=True)
+
+ def download_and_extract(self):
+ """Override method from TestingMixin for more specific behavior."""
+ extract_dirs = ['config/*',
+ 'firefox-ui/*',
+ 'marionette/*',
+ 'mozbase/*',
+ 'tools/wptserve/*',
+ ]
+ super(FirefoxUITests, self).download_and_extract(extract_dirs=extract_dirs)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+
+ abs_dirs = super(FirefoxUITests, self).query_abs_dirs()
+ abs_tests_install_dir = os.path.join(abs_dirs['abs_work_dir'], 'tests')
+
+ dirs = {
+ 'abs_blob_upload_dir': os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir'),
+ 'abs_test_install_dir': abs_tests_install_dir,
+ 'abs_fxui_dir': os.path.join(abs_tests_install_dir, 'firefox-ui'),
+ }
+
+ for key in dirs:
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def query_harness_args(self, extra_harness_config_options=None):
+ """Collects specific test related command line arguments.
+
+ Sub classes should override this method for their own specific arguments.
+ """
+ config_options = extra_harness_config_options or []
+
+ args = []
+ for option in config_options:
+ dest = option[1]['dest']
+ name = self.config.get(dest)
+
+ if name:
+                if isinstance(name, bool):
+ args.append(option[0][0])
+ else:
+ args.extend([option[0][0], self.config[dest]])
+
+ return args
+
+ def run_test(self, binary_path, env=None, marionette_port=2828):
+ """All required steps for running the tests against an installer."""
+ dirs = self.query_abs_dirs()
+
+ # Import the harness to retrieve the location of the cli scripts
+ import firefox_ui_harness
+
+ cmd = [
+ self.query_python_path(),
+ os.path.join(os.path.dirname(firefox_ui_harness.__file__),
+ self.cli_script),
+ '--binary', binary_path,
+ '--address', 'localhost:{}'.format(marionette_port),
+
+ # Resource files to serve via local webserver
+ '--server-root', os.path.join(dirs['abs_fxui_dir'], 'resources'),
+
+ # Use the work dir to get temporary data stored
+ '--workspace', dirs['abs_work_dir'],
+
+ # logging options
+ '--gecko-log=-', # output from the gecko process redirected to stdout
+ '--log-raw=-', # structured log for output parser redirected to stdout
+
+            # additional reports helpful for Jenkins and inspection via Treeherder
+ '--log-html', os.path.join(dirs['abs_blob_upload_dir'], 'report.html'),
+ '--log-xunit', os.path.join(dirs['abs_blob_upload_dir'], 'report.xml'),
+
+ # Enable tracing output to log transmission protocol
+ '-vv',
+ ]
+
+ # Collect all pass-through harness options to the script
+ cmd.extend(self.query_harness_args())
+
+ # Translate deprecated --e10s flag
+ if not self.config.get('e10s'):
+ cmd.append('--disable-e10s')
+
+ if self.symbols_url:
+ cmd.extend(['--symbols-path', self.symbols_url])
+
+ if self.config.get('tag'):
+ cmd.extend(['--tag', self.config['tag']])
+
+ parser = StructuredOutputParser(config=self.config,
+ log_obj=self.log_obj,
+ strict=False)
+
+ # Add the default tests to run
+ tests = [os.path.join(dirs['abs_fxui_dir'], 'tests', test) for test in self.default_tests]
+ cmd.extend(tests)
+
+ # Set further environment settings
+ env = env or self.query_env()
+ env.update({'MINIDUMP_SAVE_PATH': dirs['abs_blob_upload_dir']})
+ if self.query_minidump_stackwalk():
+ env.update({'MINIDUMP_STACKWALK': self.minidump_stackwalk_path})
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ return_code = self.run_command(cmd,
+ cwd=dirs['abs_work_dir'],
+ output_timeout=300,
+ output_parser=parser,
+ env=env)
+
+ tbpl_status, log_level = parser.evaluate_parser(return_code)
+ self.buildbot_status(tbpl_status, level=log_level)
+
+ return return_code
+
+ @PreScriptAction('run-tests')
+ def _pre_run_tests(self, action):
+ if not self.installer_path and not self.installer_url:
+ self.critical('Please specify an installer via --installer-path or --installer-url.')
+ sys.exit(1)
+
+ def run_tests(self):
+ """Run all the tests"""
+ return self.run_test(
+ binary_path=self.binary_path,
+ env=self.query_env(),
+ )
+
+
+class FirefoxUIFunctionalTests(FirefoxUITests):
+
+ cli_script = 'cli_functional.py'
+ default_tests = [
+ os.path.join('puppeteer', 'manifest.ini'),
+ os.path.join('functional', 'manifest.ini'),
+ ]
+
+
+class FirefoxUIUpdateTests(FirefoxUITests):
+
+ cli_script = 'cli_update.py'
+ default_tests = [
+ os.path.join('update', 'manifest.ini')
+ ]
+
+ def __init__(self, config_options=None, *args, **kwargs):
+ config_options = config_options or firefox_ui_update_config_options
+
+ super(FirefoxUIUpdateTests, self).__init__(
+ config_options=config_options,
+ *args, **kwargs
+ )
+
+ def query_harness_args(self):
+ """Collects specific update test related command line arguments."""
+ return super(FirefoxUIUpdateTests, self).query_harness_args(
+ firefox_ui_update_harness_config_options)
diff --git a/testing/mozharness/mozharness/mozilla/testing/mozpool.py b/testing/mozharness/mozharness/mozilla/testing/mozpool.py
new file mode 100644
index 000000000..f9da6c190
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/mozpool.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+'''Interact with mozpool/lifeguard/bmm.
+'''
+
+import os
+import socket
+import sys
+
+from time import sleep
+from mozharness.mozilla.buildbot import TBPL_RETRY, TBPL_EXCEPTION
+
+# TODO - adjust these values
+MAX_RETRIES = 20
+RETRY_INTERVAL = 60
+
+# MozpoolMixin {{{1
+class MozpoolMixin(object):
+ mozpool_handler = None
+    mobile_imaging_format = "http://mobile-imaging"
+
+ def determine_mozpool_host(self, device):
+ if "mobile_imaging_format" in self.config:
+ self.mobile_imaging_format = self.config["mobile_imaging_format"]
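+        # Strip the leading 'http://' (7 characters) to get the bare hostname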
+ hostname = str(self.mobile_imaging_format)[7:]
+ fqdn = socket.getfqdn(hostname)
+ imaging_server_fqdn = (str(self.mobile_imaging_format)).replace(hostname, fqdn)
+ return imaging_server_fqdn
+
+ def query_mozpool_handler(self, device=None, mozpool_api_url=None):
+        if self.mozpool_handler is not None:
+ return self.mozpool_handler
+ else:
+ self.mozpool_api_url = self.determine_mozpool_host(device) if device else mozpool_api_url
+            assert self.mozpool_api_url is not None, \
+ "query_mozpool_handler() requires either a device or mozpool_api_url!"
+
+ site_packages_path = self.query_python_site_packages_path()
+ mph_path = os.path.join(site_packages_path, 'mozpoolclient')
+ sys.path.append(mph_path)
+ sys.path.append(site_packages_path)
+ try:
+ from mozpoolclient import MozpoolHandler, MozpoolException, MozpoolConflictException
+ self.MozpoolException = MozpoolException
+ self.MozpoolConflictException = MozpoolConflictException
+ self.mozpool_handler = MozpoolHandler(self.mozpool_api_url, log_obj=self)
+            except ImportError as e:
+ self.fatal("Can't instantiate MozpoolHandler until mozpoolclient python "
+ "package is installed! (VirtualenvMixin?): \n%s" % str(e))
+ return self.mozpool_handler
+
+ def retrieve_b2g_device(self, b2gbase):
+ mph = self.query_mozpool_handler(self.mozpool_device)
+ for retry in self._retry_sleep(
+ error_message="INFRA-ERROR: Could not request device '%s'" % self.mozpool_device,
+ tbpl_status=TBPL_EXCEPTION):
+ try:
+ image = 'b2g'
+ duration = 4 * 60 * 60 # request valid for 14400 seconds == 4 hours
+ response = mph.request_device(self.mozpool_device, image, assignee=self.mozpool_assignee, \
+ b2gbase=b2gbase, pxe_config=None, duration=duration)
+ break
+ except self.MozpoolConflictException:
+ self.warning("Device unavailable. Retry#%i.." % retry)
+            except self.MozpoolException as e:
+ self.buildbot_status(TBPL_RETRY)
+ self.fatal("We could not request the device: %s" % str(e))
+
+ self.request_url = response['request']['url']
+ self.info("Got request, url=%s" % self.request_url)
+ self._wait_for_request_ready()
+
+ def retrieve_android_device(self, b2gbase):
+ mph = self.query_mozpool_handler(self.mozpool_device)
+ for retry in self._retry_sleep(
+ error_message="INFRA-ERROR: Could not request device '%s'" % self.mozpool_device,
+ tbpl_status=TBPL_RETRY):
+ try:
+ image = 'panda-android-4.0.4_v3.3'
+ duration = 4 * 60 * 60 # request valid for 14400 seconds == 4 hours
+ response = mph.request_device(self.mozpool_device, image, assignee=self.mozpool_assignee, \
+ b2gbase=b2gbase, pxe_config=None, duration=duration)
+ break
+ except self.MozpoolConflictException:
+ self.warning("Device unavailable. Retry#%i.." % retry)
+ except self.MozpoolException, e:
+ self.buildbot_status(TBPL_RETRY)
+ self.fatal("We could not request the device: %s" % str(e))
+
+ self.request_url = response['request']['url']
+ self.info("Got request, url=%s" % self.request_url)
+ self._wait_for_request_ready()
+
+ def _retry_job_and_close_request(self, message, exception=None):
+ mph = self.query_mozpool_handler(self.mozpool_device)
+        exception_message = str(exception) if exception is not None else ""
+ self.error("%s -> %s" % (message, exception_message))
+ if self.request_url:
+ mph.close_request(self.request_url)
+ self.buildbot_status(TBPL_RETRY)
+ self.fatal(message)
+
+ def _retry_sleep(self, sleep_time=RETRY_INTERVAL, max_retries=MAX_RETRIES,
+ error_message=None, tbpl_status=None, fail_cb=None):
+ for x in range(1, max_retries + 1):
+ yield x
+ sleep(sleep_time)
+ if error_message:
+ self.error(error_message)
+ if tbpl_status:
+ self.buildbot_status(tbpl_status)
+ if fail_cb:
+ assert callable(fail_cb)
+ fail_cb()
+ self.fatal('Retries limit exceeded')
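+
+    # Minimal usage sketch for _retry_sleep() (illustrative; the real call
+    # sites are retrieve_*_device() and _wait_for_request_ready()). Breaking
+    # out of the loop skips the error/fatal handling; exhausting all retries
+    # logs error_message, sets tbpl_status and finally calls self.fatal():
+    #
+    #   for attempt in self._retry_sleep(sleep_time=5, max_retries=3,
+    #                                    error_message="gave up"):
+    #       if poll_something():  # hypothetical readiness check
+    #           break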
+
+ def _wait_for_request_ready(self):
+ mph = self.query_mozpool_handler(self.mozpool_device)
+ def on_fail():
+ # Device is not ready after retries...
+ self.info("Aborting mozpool request.")
+ self.close_request()
+ for retry in self._retry_sleep(sleep_time=RETRY_INTERVAL, max_retries=MAX_RETRIES,
+ error_message="INFRA-ERROR: Request did not become ready in time",
+ tbpl_status=TBPL_EXCEPTION, fail_cb=on_fail):
+ response = mph.query_request_status(self.request_url)
+ state = response['state']
+ if state == 'ready':
+ return
+ self.info("Waiting for request 'ready' stage. Current state: '%s'" % state)
diff --git a/testing/mozharness/mozharness/mozilla/testing/talos.py b/testing/mozharness/mozharness/mozilla/testing/talos.py
new file mode 100755
index 000000000..73f384ce7
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/talos.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""
+run talos tests in a virtualenv
+"""
+
+import os
+import pprint
+import copy
+import re
+import json
+
+import mozharness
+from mozharness.base.config import parse_config_file
+from mozharness.base.errors import PythonErrorList
+from mozharness.base.log import OutputParser, DEBUG, ERROR, CRITICAL
+from mozharness.base.log import INFO, WARNING
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.testing.errors import TinderBoxPrintRe
+from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WORST_LEVEL_TUPLE
+from mozharness.mozilla.buildbot import TBPL_RETRY, TBPL_FAILURE, TBPL_WARNING
+
+external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))),
+ 'external_tools',
+)
+
+TalosErrorList = PythonErrorList + [
+ {'regex': re.compile(r'''run-as: Package '.*' is unknown'''), 'level': DEBUG},
+ {'substr': r'''FAIL: Graph server unreachable''', 'level': CRITICAL},
+ {'substr': r'''FAIL: Busted:''', 'level': CRITICAL},
+ {'substr': r'''FAIL: failed to cleanup''', 'level': ERROR},
+ {'substr': r'''erfConfigurator.py: Unknown error''', 'level': CRITICAL},
+ {'substr': r'''talosError''', 'level': CRITICAL},
+ {'regex': re.compile(r'''No machine_name called '.*' can be found'''), 'level': CRITICAL},
+ {'substr': r"""No such file or directory: 'browser_output.txt'""",
+ 'level': CRITICAL,
+ 'explanation': r"""Most likely the browser failed to launch, or the test was otherwise unsuccessful in even starting."""},
+]
+
+# TODO: check for running processes on script invocation
+
+class TalosOutputParser(OutputParser):
+ minidump_regex = re.compile(r'''talosError: "error executing: '(\S+) (\S+) (\S+)'"''')
+ RE_PERF_DATA = re.compile(r'.*PERFHERDER_DATA:\s+(\{.*\})')
+ worst_tbpl_status = TBPL_SUCCESS
+
+ def __init__(self, **kwargs):
+ super(TalosOutputParser, self).__init__(**kwargs)
+ self.minidump_output = None
+ self.found_perf_data = []
+
+ def update_worst_log_and_tbpl_levels(self, log_level, tbpl_level):
+ self.worst_log_level = self.worst_level(log_level,
+ self.worst_log_level)
+ self.worst_tbpl_status = self.worst_level(
+ tbpl_level, self.worst_tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE
+ )
+
+ def parse_single_line(self, line):
+ """ In Talos land, every line that starts with RETURN: needs to be
+ printed with a TinderboxPrint:"""
+ if line.startswith("RETURN:"):
+ line.replace("RETURN:", "TinderboxPrint:")
+ m = self.minidump_regex.search(line)
+ if m:
+ self.minidump_output = (m.group(1), m.group(2), m.group(3))
+
+ m = self.RE_PERF_DATA.match(line)
+ if m:
+ self.found_perf_data.append(m.group(1))
+
+ # now let's check if buildbot should retry
+ harness_retry_re = TinderBoxPrintRe['harness_error']['retry_regex']
+ if harness_retry_re.search(line):
+ self.critical(' %s' % line)
+ self.update_worst_log_and_tbpl_levels(CRITICAL, TBPL_RETRY)
+ return # skip base parse_single_line
+ super(TalosOutputParser, self).parse_single_line(line)
+
+
+class Talos(TestingMixin, MercurialScript, BlobUploadMixin):
+ """
+ install and run Talos tests:
+ https://wiki.mozilla.org/Buildbot/Talos
+ """
+ config_options = [
+ [["--use-talos-json"],
+ {"action": "store_true",
+ "dest": "use_talos_json",
+ "default": False,
+ "help": "Use talos config from talos.json"
+ }],
+ [["--suite"],
+ {"action": "store",
+ "dest": "suite",
+ "help": "Talos suite to run (from talos json)"
+ }],
+ [["--branch-name"],
+ {"action": "store",
+ "dest": "branch",
+ "help": "Graphserver branch to report to"
+ }],
+ [["--system-bits"],
+ {"action": "store",
+ "dest": "system_bits",
+ "type": "choice",
+ "default": "32",
+ "choices": ['32', '64'],
+ "help": "Testing 32 or 64 (for talos json plugins)"
+ }],
+ [["--add-option"],
+ {"action": "extend",
+ "dest": "talos_extra_options",
+ "default": None,
+ "help": "extra options to talos"
+ }],
+ [["--spsProfile"], {
+ "dest": "sps_profile",
+ "action": "store_true",
+ "default": False,
+ "help": "Whether or not to profile the test run and save the profile results"
+ }],
+ [["--spsProfileInterval"], {
+ "dest": "sps_profile_interval",
+ "type": "int",
+ "default": 0,
+ "help": "The interval between samples taken by the profiler (milliseconds)"
+ }],
+ ] + testing_config_options + copy.deepcopy(blobupload_config_options)
+
+ def __init__(self, **kwargs):
+ kwargs.setdefault('config_options', self.config_options)
+ kwargs.setdefault('all_actions', ['clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'populate-webroot',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ ])
+ kwargs.setdefault('default_actions', ['clobber',
+ 'download-and-extract',
+ 'populate-webroot',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests',
+ ])
+ kwargs.setdefault('config', {})
+ super(Talos, self).__init__(**kwargs)
+
+ self.workdir = self.query_abs_dirs()['abs_work_dir'] # convenience
+
+ self.run_local = self.config.get('run_local')
+ self.installer_url = self.config.get("installer_url")
+ self.talos_json_url = self.config.get("talos_json_url")
+ self.talos_json = self.config.get("talos_json")
+ self.talos_json_config = self.config.get("talos_json_config")
+ self.tests = None
+ self.pagesets_url = None
+ self.sps_profile = self.config.get('sps_profile')
+ self.sps_profile_interval = self.config.get('sps_profile_interval')
+
+ # We accept some configuration options from the try commit message in the format mozharness: <options>
+ # Example try commit message:
+ # mozharness: --spsProfile try: <stuff>
+ def query_sps_profile_options(self):
+ sps_results = []
+ if self.buildbot_config:
+ # this is inside automation
+ # now let's see if we added spsProfile specs in the commit message
+ try:
+ junk, junk, opts = self.buildbot_config['sourcestamp']['changes'][-1]['comments'].partition('mozharness:')
+ except IndexError:
+ # when we don't have comments on changes (bug 1255187)
+ opts = None
+
+ if opts:
+ # In the case of a multi-line commit message, only examine
+ # the first line for mozharness options
+ opts = opts.split('\n')[0]
+ opts = re.sub(r'\w+:.*', '', opts).strip().split(' ')
+ if "--spsProfile" in opts:
+ # overwrite whatever was set here.
+ self.sps_profile = True
+ try:
+ idx = opts.index('--spsProfileInterval')
+ if len(opts) > idx + 1:
+ self.sps_profile_interval = opts[idx + 1]
+ except ValueError:
+ pass
+ # finally, if sps_profile is set, we add that to the talos options
+ if self.sps_profile:
+ sps_results.append('--spsProfile')
+ if self.sps_profile_interval:
+ sps_results.extend(
+ ['--spsProfileInterval', str(self.sps_profile_interval)]
+ )
+ return sps_results
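+
+    # Illustrative example (values assumed): a try push whose commit message
+    # contains "mozharness: --spsProfile --spsProfileInterval 10 try: -b o ..."
+    # makes this method return ['--spsProfile', '--spsProfileInterval', '10'].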
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(Talos, self).query_abs_dirs()
+ abs_dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def query_talos_json_config(self):
+ """Return the talos json config."""
+ if self.talos_json_config:
+ return self.talos_json_config
+ if not self.talos_json:
+ self.talos_json = os.path.join(self.talos_path, 'talos.json')
+ self.talos_json_config = parse_config_file(self.talos_json)
+ self.info(pprint.pformat(self.talos_json_config))
+ return self.talos_json_config
+
+ def query_pagesets_url(self):
+ """Certain suites require external pagesets to be downloaded and
+ extracted.
+ """
+ if self.pagesets_url:
+ return self.pagesets_url
+ if self.query_talos_json_config() and 'suite' in self.config:
+ self.pagesets_url = self.talos_json_config['suites'][self.config['suite']].get('pagesets_url')
+ return self.pagesets_url
+
+ def talos_options(self, args=None, **kw):
+ """return options to talos"""
+ # binary path
+ binary_path = self.binary_path or self.config.get('binary_path')
+ if not binary_path:
+ self.fatal("Talos requires a path to the binary. You can specify binary_path or add download-and-extract to your action list.")
+
+ # talos options
+ options = []
+ # talos can't gather data if the process name ends with '.exe'
+ if binary_path.endswith('.exe'):
+ binary_path = binary_path[:-4]
+ # options overwritten from **kw
+ kw_options = {'executablePath': binary_path}
+ if 'suite' in self.config:
+ kw_options['suite'] = self.config['suite']
+ if self.config.get('title'):
+ kw_options['title'] = self.config['title']
+ if self.config.get('branch'):
+ kw_options['branchName'] = self.config['branch']
+ if self.symbols_path:
+ kw_options['symbolsPath'] = self.symbols_path
+ kw_options.update(kw)
+ # talos expects tests to be in the format (e.g.) 'ts:tp5:tsvg'
+ tests = kw_options.get('activeTests')
+ if tests and not isinstance(tests, basestring):
+ tests = ':'.join(tests) # Talos expects this format
+ kw_options['activeTests'] = tests
+ for key, value in kw_options.items():
+ options.extend(['--%s' % key, value])
+ # configure profiling options
+ options.extend(self.query_sps_profile_options())
+ # extra arguments
+ if args is not None:
+ options += args
+ if 'talos_extra_options' in self.config:
+ options += self.config['talos_extra_options']
+ return options
+
+ def populate_webroot(self):
+ """Populate the production test slaves' webroots"""
+ c = self.config
+
+ self.talos_path = os.path.join(
+ self.query_abs_dirs()['abs_work_dir'], 'tests', 'talos'
+ )
+ if c.get('run_local'):
+ self.talos_path = os.path.dirname(self.talos_json)
+
+ src_talos_webdir = os.path.join(self.talos_path, 'talos')
+
+ if self.query_pagesets_url():
+ self.info('Downloading pageset...')
+ dirs = self.query_abs_dirs()
+ src_talos_pageset = os.path.join(src_talos_webdir, 'tests')
+ archive = self.download_file(self.pagesets_url, parent_dir=dirs['abs_work_dir'])
+ unzip = self.query_exe('unzip')
+ unzip_cmd = [unzip, '-q', '-o', archive, '-d', src_talos_pageset]
+ self.run_command(unzip_cmd, halt_on_failure=True)
+
+ # Action methods. {{{1
+ # clobber defined in BaseScript
+ # read_buildbot_config defined in BuildbotMixin
+
+ def download_and_extract(self, extract_dirs=None, suite_categories=None):
+ return super(Talos, self).download_and_extract(
+ suite_categories=['common', 'talos']
+ )
+
+ def create_virtualenv(self, **kwargs):
+ """VirtualenvMixin.create_virtualenv() assuemes we're using
+ self.config['virtualenv_modules']. Since we are installing
+ talos from its source, we have to wrap that method here."""
+ # install mozbase first, so we use in-tree versions
+ if not self.run_local:
+ mozbase_requirements = os.path.join(
+ self.query_abs_dirs()['abs_work_dir'],
+ 'tests',
+ 'config',
+ 'mozbase_requirements.txt'
+ )
+ else:
+ mozbase_requirements = os.path.join(
+ os.path.dirname(self.talos_path),
+ 'config',
+ 'mozbase_requirements.txt'
+ )
+ self.register_virtualenv_module(
+ requirements=[mozbase_requirements],
+ two_pass=True,
+ editable=True,
+ )
+ # require pip >= 1.5 so pip will prefer .whl files to install
+ super(Talos, self).create_virtualenv(
+ modules=['pip>=1.5']
+ )
+        # The talos harness also requires whatever is
+        # listed in the talos requirements.txt file.
+ self.install_module(
+ requirements=[os.path.join(self.talos_path,
+ 'requirements.txt')]
+ )
+ # install jsonschema for perfherder validation
+ self.install_module(module="jsonschema")
+
+ def _validate_treeherder_data(self, parser):
+ # late import is required, because install is done in create_virtualenv
+ import jsonschema
+
+ if len(parser.found_perf_data) != 1:
+ self.critical("PERFHERDER_DATA was seen %d times, expected 1."
+ % len(parser.found_perf_data))
+ parser.update_worst_log_and_tbpl_levels(WARNING, TBPL_WARNING)
+ return
+
+ schema_path = os.path.join(external_tools_path,
+ 'performance-artifact-schema.json')
+ self.info("Validating PERFHERDER_DATA against %s" % schema_path)
+ try:
+ with open(schema_path) as f:
+ schema = json.load(f)
+ data = json.loads(parser.found_perf_data[0])
+ jsonschema.validate(data, schema)
+ except:
+ self.exception("Error while validating PERFHERDER_DATA")
+ parser.update_worst_log_and_tbpl_levels(WARNING, TBPL_WARNING)
+
+ def run_tests(self, args=None, **kw):
+ """run Talos tests"""
+
+ # get talos options
+ options = self.talos_options(args=args, **kw)
+
+ # XXX temporary python version check
+ python = self.query_python_path()
+ self.run_command([python, "--version"])
+ parser = TalosOutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=TalosErrorList)
+ env = {}
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not self.run_local:
+ env['MINIDUMP_STACKWALK'] = self.query_minidump_stackwalk()
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
+ self.mkdir_p(env['MOZ_UPLOAD_DIR'])
+ env = self.query_env(partial_env=env, log_level=INFO)
+ # adjust PYTHONPATH to be able to use talos as a python package
+ if 'PYTHONPATH' in env:
+ env['PYTHONPATH'] = self.talos_path + os.pathsep + env['PYTHONPATH']
+ else:
+ env['PYTHONPATH'] = self.talos_path
+
+ # sets a timeout for how long talos should run without output
+ output_timeout = self.config.get('talos_output_timeout', 3600)
+ # run talos tests
+ run_tests = os.path.join(self.talos_path, 'talos', 'run_tests.py')
+
+ mozlog_opts = ['--log-tbpl-level=debug']
+ if not self.run_local and 'suite' in self.config:
+ fname_pattern = '%s_%%s.log' % self.config['suite']
+ mozlog_opts.append('--log-errorsummary=%s'
+ % os.path.join(env['MOZ_UPLOAD_DIR'],
+ fname_pattern % 'errorsummary'))
+ mozlog_opts.append('--log-raw=%s'
+ % os.path.join(env['MOZ_UPLOAD_DIR'],
+ fname_pattern % 'raw'))
+
+ command = [python, run_tests] + options + mozlog_opts
+ self.return_code = self.run_command(command, cwd=self.workdir,
+ output_timeout=output_timeout,
+ output_parser=parser,
+ env=env)
+ if parser.minidump_output:
+ self.info("Looking at the minidump files for debugging purposes...")
+ for item in parser.minidump_output:
+ self.run_command(["ls", "-l", item])
+
+ if self.return_code not in [0]:
+ # update the worst log level and tbpl status
+ log_level = ERROR
+ tbpl_level = TBPL_FAILURE
+ if self.return_code == 1:
+ log_level = WARNING
+ tbpl_level = TBPL_WARNING
+ if self.return_code == 4:
+ log_level = WARNING
+ tbpl_level = TBPL_RETRY
+
+ parser.update_worst_log_and_tbpl_levels(log_level, tbpl_level)
+ else:
+ if not self.sps_profile:
+ self._validate_treeherder_data(parser)
+
+ self.buildbot_status(parser.worst_tbpl_status,
+ level=parser.worst_log_level)
diff --git a/testing/mozharness/mozharness/mozilla/testing/testbase.py b/testing/mozharness/mozharness/mozilla/testing/testbase.py
new file mode 100755
index 000000000..9f13ae100
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/testbase.py
@@ -0,0 +1,863 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import copy
+import os
+import platform
+import pprint
+import re
+import urllib2
+import json
+import socket
+
+from mozharness.base.errors import BaseErrorList
+from mozharness.base.log import FATAL, WARNING
+from mozharness.base.python import (
+ ResourceMonitoringMixin,
+ VirtualenvMixin,
+ virtualenv_config_options,
+)
+from mozharness.mozilla.buildbot import BuildbotMixin, TBPL_WARNING
+from mozharness.mozilla.proxxy import Proxxy
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.mozilla.taskcluster_helper import TaskClusterArtifactFinderMixin
+from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
+from mozharness.mozilla.testing.try_tools import TryToolsMixin, try_config_options
+from mozharness.mozilla.tooltool import TooltoolMixin
+
+from mozharness.lib.python.authentication import get_credentials
+
+INSTALLER_SUFFIXES = ('.apk', # Android
+ '.tar.bz2', '.tar.gz', # Linux
+ '.dmg', # Mac
+ '.installer-stub.exe', '.installer.exe', '.exe', '.zip', # Windows
+ )
+
+# https://dxr.mozilla.org/mozilla-central/source/testing/config/tooltool-manifests
+TOOLTOOL_PLATFORM_DIR = {
+ 'linux': 'linux32',
+ 'linux64': 'linux64',
+ 'win32': 'win32',
+ 'win64': 'win32',
+ 'macosx': 'macosx64',
+}
+
+
+testing_config_options = [
+ [["--installer-url"],
+ {"action": "store",
+ "dest": "installer_url",
+ "default": None,
+ "help": "URL to the installer to install",
+ }],
+ [["--installer-path"],
+ {"action": "store",
+ "dest": "installer_path",
+ "default": None,
+ "help": "Path to the installer to install. This is set automatically if run with --download-and-extract.",
+ }],
+ [["--binary-path"],
+ {"action": "store",
+ "dest": "binary_path",
+ "default": None,
+ "help": "Path to installed binary. This is set automatically if run with --install.",
+ }],
+ [["--exe-suffix"],
+ {"action": "store",
+ "dest": "exe_suffix",
+ "default": None,
+ "help": "Executable suffix for binaries on this platform",
+ }],
+ [["--test-url"],
+ {"action": "store",
+ "dest": "test_url",
+ "default": None,
+ "help": "URL to the zip file containing the actual tests",
+ }],
+ [["--test-packages-url"],
+ {"action": "store",
+ "dest": "test_packages_url",
+ "default": None,
+ "help": "URL to a json file describing which tests archives to download",
+ }],
+ [["--jsshell-url"],
+ {"action": "store",
+ "dest": "jsshell_url",
+ "default": None,
+ "help": "URL to the jsshell to install",
+ }],
+ [["--download-symbols"],
+ {"action": "store",
+ "dest": "download_symbols",
+ "type": "choice",
+ "choices": ['ondemand', 'true'],
+ "help": "Download and extract crash reporter symbols.",
+ }],
+] + copy.deepcopy(virtualenv_config_options) + copy.deepcopy(try_config_options)
+
+
+# TestingMixin {{{1
+class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin,
+ TaskClusterArtifactFinderMixin, TooltoolMixin, TryToolsMixin):
+ """
+ The steps to identify + download the proper bits for [browser] unit
+ tests and Talos.
+ """
+
+ installer_url = None
+ installer_path = None
+ binary_path = None
+ test_url = None
+ test_packages_url = None
+ symbols_url = None
+ symbols_path = None
+ jsshell_url = None
+ minidump_stackwalk_path = None
+ nodejs_path = None
+ default_tools_repo = 'https://hg.mozilla.org/build/tools'
+ proxxy = None
+
+ def _query_proxxy(self):
+ """manages the proxxy"""
+ if not self.proxxy:
+ self.proxxy = Proxxy(self.config, self.log_obj)
+ return self.proxxy
+
+ def download_proxied_file(self, url, file_name=None, parent_dir=None,
+ create_parent_dir=True, error_level=FATAL,
+ exit_code=3):
+ proxxy = self._query_proxxy()
+ return proxxy.download_proxied_file(url=url, file_name=file_name,
+ parent_dir=parent_dir,
+ create_parent_dir=create_parent_dir,
+ error_level=error_level,
+ exit_code=exit_code)
+
+ def download_file(self, *args, **kwargs):
+ '''
+        In developer mode this bypasses the proxied download, since proxied
+        downloads do not support authenticated requests.
+ This could be re-factored and fixed in bug 1087664.
+ '''
+ if self.config.get("developer_mode"):
+ return super(TestingMixin, self).download_file(*args, **kwargs)
+ else:
+ return self.download_proxied_file(*args, **kwargs)
+
+ def query_build_dir_url(self, file_name):
+ """
+ Resolve a file name to a potential url in the build upload directory where
+ that file can be found.
+ """
+ if self.test_packages_url:
+ reference_url = self.test_packages_url
+ elif self.installer_url:
+ reference_url = self.installer_url
+ else:
+ self.fatal("Can't figure out build directory urls without an installer_url "
+ "or test_packages_url!")
+
+ last_slash = reference_url.rfind('/')
+ base_url = reference_url[:last_slash]
+
+ return '%s/%s' % (base_url, file_name)
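+
+    # Illustrative example (URL is hypothetical): with
+    #   test_packages_url = "https://host/pub/build/target.test_packages.json"
+    # query_build_dir_url('target.txt') resolves to
+    #   "https://host/pub/build/target.txt" in the same upload directory.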
+
+ def query_prefixed_build_dir_url(self, suffix):
+ """Resolve a file name prefixed with platform and build details to a potential url
+ in the build upload directory where that file can be found.
+ """
+ if self.test_packages_url:
+ reference_suffixes = ['.test_packages.json']
+ reference_url = self.test_packages_url
+ elif self.installer_url:
+ reference_suffixes = INSTALLER_SUFFIXES
+ reference_url = self.installer_url
+ else:
+ self.fatal("Can't figure out build directory urls without an installer_url "
+ "or test_packages_url!")
+
+ url = None
+ for reference_suffix in reference_suffixes:
+ if reference_url.endswith(reference_suffix):
+ url = reference_url[:-len(reference_suffix)] + suffix
+ break
+
+ return url
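+
+    # Illustrative example (URL is hypothetical): with
+    #   installer_url = ".../firefox-55.0.en-US.linux-x86_64.tar.bz2"
+    # query_prefixed_build_dir_url('.txt') strips the matching '.tar.bz2'
+    # installer suffix and returns ".../firefox-55.0.en-US.linux-x86_64.txt".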
+
+ def query_symbols_url(self, raise_on_failure=False):
+ if self.symbols_url:
+ return self.symbols_url
+
+ elif self.installer_url:
+ symbols_url = self.query_prefixed_build_dir_url('.crashreporter-symbols.zip')
+
+ # Check if the URL exists. If not, use none to allow mozcrash to auto-check for symbols
+ try:
+ if symbols_url:
+ self._urlopen(symbols_url, timeout=120)
+ self.symbols_url = symbols_url
+ except Exception as ex:
+ self.warning("Cannot open symbols url %s (installer url: %s): %s" %
+ (symbols_url, self.installer_url, ex))
+ if raise_on_failure:
+ raise
+
+ # If no symbols URL can be determined let minidump_stackwalk query the symbols.
+ # As of now this only works for Nightly and release builds.
+ if not self.symbols_url:
+ self.warning("No symbols_url found. Let minidump_stackwalk query for symbols.")
+
+ return self.symbols_url
+
+ def _pre_config_lock(self, rw_config):
+ for i, (target_file, target_dict) in enumerate(rw_config.all_cfg_files_and_dicts):
+ if 'developer_config' in target_file:
+ self._developer_mode_changes(rw_config)
+
+ def _developer_mode_changes(self, rw_config):
+ """ This function is called when you append the config called
+ developer_config.py. This allows you to run a job
+ outside of the Release Engineering infrastructure.
+
+ What this functions accomplishes is:
+ * read-buildbot-config is removed from the list of actions
+ * --installer-url is set
+ * --test-url is set if needed
+ * every url is substituted by another external to the
+ Release Engineering network
+ """
+ c = self.config
+ orig_config = copy.deepcopy(c)
+ self.warning("When you use developer_config.py, we drop "
+ "'read-buildbot-config' from the list of actions.")
+ if "read-buildbot-config" in rw_config.actions:
+ rw_config.actions.remove("read-buildbot-config")
+ self.actions = tuple(rw_config.actions)
+
+ def _replace_url(url, changes):
+ for from_, to_ in changes:
+ if url.startswith(from_):
+ new_url = url.replace(from_, to_)
+ self.info("Replacing url %s -> %s" % (url, new_url))
+ return new_url
+ return url
+
+ if c.get("installer_url") is None:
+ self.exception("You must use --installer-url with developer_config.py")
+ if c.get("require_test_zip"):
+ if not c.get('test_url') and not c.get('test_packages_url'):
+ self.exception("You must use --test-url or --test-packages-url with developer_config.py")
+
+ c["installer_url"] = _replace_url(c["installer_url"], c["replace_urls"])
+ if c.get("test_url"):
+ c["test_url"] = _replace_url(c["test_url"], c["replace_urls"])
+ if c.get("test_packages_url"):
+ c["test_packages_url"] = _replace_url(c["test_packages_url"], c["replace_urls"])
+
+ for key, value in self.config.iteritems():
+ if type(value) == str and value.startswith("http"):
+ self.config[key] = _replace_url(value, c["replace_urls"])
+
+ # Any changes to c means that we need credentials
+ if not c == orig_config:
+ get_credentials()
+
+ def _urlopen(self, url, **kwargs):
+ '''
+        This helper deals with downloading files from outside
+        of the Release Engineering network.
+ '''
+ # Code based on http://code.activestate.com/recipes/305288-http-basic-authentication
+ def _urlopen_basic_auth(url, **kwargs):
+ self.info("We want to download this file %s" % url)
+ if not hasattr(self, "https_username"):
+ self.info("NOTICE: Files downloaded from outside of "
+ "Release Engineering network require LDAP "
+ "credentials.")
+
+ self.https_username, self.https_password = get_credentials()
+ # This creates a password manager
+ passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+            # Passing None as the realm makes this username/password pair apply to any realm at this URL from here on
+ passman.add_password(None, url, self.https_username, self.https_password)
+ authhandler = urllib2.HTTPBasicAuthHandler(passman)
+
+ return urllib2.build_opener(authhandler).open(url, **kwargs)
+
+ # If we have the developer_run flag enabled then we will switch
+ # URLs to the right place and enable http authentication
+ if "developer_config.py" in self.config["config_files"]:
+ return _urlopen_basic_auth(url, **kwargs)
+ else:
+ return urllib2.urlopen(url, **kwargs)
+
+ # read_buildbot_config is in BuildbotMixin.
+
+ def find_artifacts_from_buildbot_changes(self):
+ c = self.config
+ try:
+ files = self.buildbot_config['sourcestamp']['changes'][-1]['files']
+ buildbot_prop_branch = self.buildbot_config['properties']['branch']
+
+ # Bug 868490 - Only require exactly two files if require_test_zip;
+ # otherwise accept either 1 or 2, since we'll be getting a
+ # test_zip url that we don't need.
+ expected_length = [1, 2, 3]
+ if c.get("require_test_zip") and not self.test_url:
+ expected_length = [2, 3]
+ if buildbot_prop_branch.startswith('gaia-try'):
+ expected_length = range(1, 1000)
+ actual_length = len(files)
+ if actual_length not in expected_length:
+ self.fatal("Unexpected number of files in buildbot config %s.\nExpected these number(s) of files: %s, but got: %d" %
+ (c['buildbot_json_path'], str(expected_length), actual_length))
+ for f in files:
+ if f['name'].endswith('tests.zip'): # yuk
+ if not self.test_url:
+ # str() because of unicode issues on mac
+ self.test_url = str(f['name'])
+ self.info("Found test url %s." % self.test_url)
+ elif f['name'].endswith('crashreporter-symbols.zip'): # yuk
+ self.symbols_url = str(f['name'])
+ self.info("Found symbols url %s." % self.symbols_url)
+ elif f['name'].endswith('test_packages.json'):
+ self.test_packages_url = str(f['name'])
+ self.info("Found a test packages url %s." % self.test_packages_url)
+ elif not any(f['name'].endswith(s) for s in ('code-coverage-gcno.zip',)):
+ if not self.installer_url:
+ self.installer_url = str(f['name'])
+ self.info("Found installer url %s." % self.installer_url)
+        except IndexError as e:
+ self.error(str(e))
+
+ def find_artifacts_from_taskcluster(self):
+ self.info("Finding installer, test and symbols from parent task. ")
+ task_id = self.buildbot_config['properties']['taskId']
+ self.set_parent_artifacts(task_id)
+
+ def postflight_read_buildbot_config(self):
+ """
+ Determine which files to download from the buildprops.json file
+ created via the buildbot ScriptFactory.
+ """
+ if self.buildbot_config:
+ c = self.config
+ message = "Unable to set %s from the buildbot config"
+ if c.get("installer_url"):
+ self.installer_url = c['installer_url']
+ if c.get("test_url"):
+ self.test_url = c['test_url']
+ if c.get("test_packages_url"):
+ self.test_packages_url = c['test_packages_url']
+
+ # This supports original Buildbot to Buildbot mode
+ if self.buildbot_config['sourcestamp']['changes']:
+ self.find_artifacts_from_buildbot_changes()
+
+ # This supports TaskCluster/BBB task to Buildbot job
+ elif 'testPackagesUrl' in self.buildbot_config['properties'] and \
+ 'packageUrl' in self.buildbot_config['properties']:
+ self.installer_url = self.buildbot_config['properties']['packageUrl']
+ self.test_packages_url = self.buildbot_config['properties']['testPackagesUrl']
+
+ # This supports TaskCluster/BBB task to TaskCluster/BBB task
+ elif 'taskId' in self.buildbot_config['properties']:
+ self.find_artifacts_from_taskcluster()
+
+ missing = []
+ if not self.installer_url:
+ missing.append("installer_url")
+ if c.get("require_test_zip") and not self.test_url and not self.test_packages_url:
+ missing.append("test_url")
+ if missing:
+ self.fatal("%s!" % (message % ('+'.join(missing))))
+ else:
+ self.fatal("self.buildbot_config isn't set after running read_buildbot_config!")
+
+ def _query_binary_version(self, regex, cmd):
+ output = self.get_output_from_command(cmd, silent=False)
+ return regex.search(output).group(0)
+
+ def preflight_download_and_extract(self):
+ message = ""
+ if not self.installer_url:
+ message += """installer_url isn't set!
+
+You can set this by:
+
+1. specifying --installer-url URL, or
+2. running via buildbot and running the read-buildbot-config action
+
+"""
+ if self.config.get("require_test_zip") and not self.test_url and not self.test_packages_url:
+ message += """test_url isn't set!
+
+You can set this by:
+
+1. specifying --test-url URL, or
+2. running via buildbot and running the read-buildbot-config action
+
+"""
+ if message:
+ self.fatal(message + "Can't run download-and-extract... exiting")
+
+ def _read_packages_manifest(self):
+ dirs = self.query_abs_dirs()
+ source = self.download_file(self.test_packages_url,
+ parent_dir=dirs['abs_work_dir'],
+ error_level=FATAL)
+
+ with self.opened(os.path.realpath(source)) as (fh, err):
+ package_requirements = json.load(fh)
+ if not package_requirements or err:
+ self.fatal("There was an error reading test package requirements from %s "
+ "requirements: `%s` - error: `%s`" % (source,
+ package_requirements or 'None',
+ err or 'No error'))
+ self.info("Using the following test package requirements:\n%s" %
+ pprint.pformat(package_requirements))
+ return package_requirements
+
+ def _download_test_packages(self, suite_categories, extract_dirs):
+ # Some platforms define more suite categories/names than others.
+ # This is a difference in the convention of the configs more than
+ # to how these tests are run, so we pave over these differences here.
+ aliases = {
+ 'robocop': 'mochitest',
+ 'mochitest-chrome': 'mochitest',
+ 'mochitest-media': 'mochitest',
+ 'mochitest-plain-clipboard': 'mochitest',
+ 'mochitest-plain-gpu': 'mochitest',
+ 'mochitest-gl': 'mochitest',
+ 'jsreftest': 'reftest',
+ 'crashtest': 'reftest',
+ 'reftest-debug': 'reftest',
+ 'jsreftest-debug': 'reftest',
+ 'crashtest-debug': 'reftest',
+ }
+ suite_categories = [aliases.get(name, name) for name in suite_categories]
+
+ dirs = self.query_abs_dirs()
+ test_install_dir = dirs.get('abs_test_install_dir',
+ os.path.join(dirs['abs_work_dir'], 'tests'))
+ self.mkdir_p(test_install_dir)
+ package_requirements = self._read_packages_manifest()
+ for category in suite_categories:
+ if category in package_requirements:
+ target_packages = package_requirements[category]
+ else:
+                # If we don't have harness-specific requirements, assume the common zip
+ # has everything we need to run tests for this suite.
+ target_packages = package_requirements['common']
+
+ self.info("Downloading packages: %s for test suite category: %s" %
+ (target_packages, category))
+ for file_name in target_packages:
+ target_dir = test_install_dir
+ unpack_dirs = extract_dirs
+
+ if "common.tests" in file_name and isinstance(unpack_dirs, list):
+ # Ensure that the following files are always getting extracted
+ required_files = ["mach",
+ "mozinfo.json",
+ ]
+ for req_file in required_files:
+ if req_file not in unpack_dirs:
+ self.info("Adding '{}' for extraction from common.tests zip file"
+ .format(req_file))
+ unpack_dirs.append(req_file)
+
+ if "jsshell-" in file_name or file_name == "target.jsshell.zip":
+ self.info("Special-casing the jsshell zip file")
+ unpack_dirs = None
+ target_dir = dirs['abs_test_bin_dir']
+
+ url = self.query_build_dir_url(file_name)
+ self.download_unpack(url, target_dir,
+ extract_dirs=unpack_dirs)
+
+ def _download_test_zip(self, extract_dirs=None):
+ dirs = self.query_abs_dirs()
+ test_install_dir = dirs.get('abs_test_install_dir',
+ os.path.join(dirs['abs_work_dir'], 'tests'))
+ self.download_unpack(self.test_url, test_install_dir,
+ extract_dirs=extract_dirs)
+
+ def structured_output(self, suite_category):
+ """Defines whether structured logging is in use in this configuration. This
+ may need to be replaced with data from a different config at the resolution
+ of bug 1070041 and related bugs.
+ """
+ return ('structured_suites' in self.config and
+ suite_category in self.config['structured_suites'])
+
+ def get_test_output_parser(self, suite_category, strict=False,
+ fallback_parser_class=DesktopUnittestOutputParser,
+ **kwargs):
+ """Derive and return an appropriate output parser, either the structured
+ output parser or a fallback based on the type of logging in use as determined by
+ configuration.
+ """
+ if not self.structured_output(suite_category):
+ if fallback_parser_class is DesktopUnittestOutputParser:
+ return DesktopUnittestOutputParser(suite_category=suite_category, **kwargs)
+ return fallback_parser_class(**kwargs)
+ self.info("Structured output parser in use for %s." % suite_category)
+ return StructuredOutputParser(suite_category=suite_category, strict=strict, **kwargs)
+
+ def _download_installer(self):
+ file_name = None
+ if self.installer_path:
+ file_name = self.installer_path
+ dirs = self.query_abs_dirs()
+ source = self.download_file(self.installer_url,
+ file_name=file_name,
+ parent_dir=dirs['abs_work_dir'],
+ error_level=FATAL)
+ self.installer_path = os.path.realpath(source)
+ self.set_buildbot_property("build_url", self.installer_url, write_to_file=True)
+
+ def _download_and_extract_symbols(self):
+ dirs = self.query_abs_dirs()
+ if self.config.get('download_symbols') == 'ondemand':
+ self.symbols_url = self.query_symbols_url()
+ self.symbols_path = self.symbols_url
+ return
+
+ else:
+            # In the 'ondemand' case above we can proceed without getting hold of
+            # the symbols right away; in all other cases we need to at least retry
+            # before giving up (e.g. debug tests need symbols)
+ self.symbols_url = self.retry(
+ action=self.query_symbols_url,
+ kwargs={'raise_on_failure': True},
+ sleeptime=20,
+ error_level=FATAL,
+ error_message="We can't proceed without downloading symbols.",
+ )
+ if not self.symbols_path:
+ self.symbols_path = os.path.join(dirs['abs_work_dir'], 'symbols')
+
+ self.set_buildbot_property("symbols_url", self.symbols_url,
+ write_to_file=True)
+ if self.symbols_url:
+ self.download_unpack(self.symbols_url, self.symbols_path)
+
+ def download_and_extract(self, extract_dirs=None, suite_categories=None):
+ """
+ download and extract test zip / download installer
+ """
+ # Swap plain http for https when we're downloading from ftp
+ # See bug 957502 and friends
+ from_ = "http://ftp.mozilla.org"
+ to_ = "https://ftp-ssl.mozilla.org"
+ for attr in 'symbols_url', 'installer_url', 'test_packages_url', 'test_url':
+ url = getattr(self, attr)
+ if url and url.startswith(from_):
+ new_url = url.replace(from_, to_)
+ self.info("Replacing url %s -> %s" % (url, new_url))
+ setattr(self, attr, new_url)
+
+ if 'test_url' in self.config:
+ # A user has specified a test_url directly, any test_packages_url will
+ # be ignored.
+ if self.test_packages_url:
+                self.error('Test data will be downloaded from "%s"; the specified test '
+                           'package data at "%s" will be ignored.' %
+ (self.config.get('test_url'), self.test_packages_url))
+
+ self._download_test_zip(extract_dirs)
+ else:
+ if not self.test_packages_url:
+ # The caller intends to download harness specific packages, but doesn't know
+ # where the packages manifest is located. This is the case when the
+ # test package manifest isn't set as a buildbot property, which is true
+ # for some self-serve jobs and platforms using parse_make_upload.
+ self.test_packages_url = self.query_prefixed_build_dir_url('.test_packages.json')
+
+ suite_categories = suite_categories or ['common']
+ self._download_test_packages(suite_categories, extract_dirs)
+
+ self._download_installer()
+ if self.config.get('download_symbols'):
+ self._download_and_extract_symbols()
+
+ # create_virtualenv is in VirtualenvMixin.
+
+ def preflight_install(self):
+ if not self.installer_path:
+ if self.config.get('installer_path'):
+ self.installer_path = self.config['installer_path']
+ else:
+ self.fatal("""installer_path isn't set!
+
+You can set this by:
+
+1. specifying --installer-path PATH, or
+2. running the download-and-extract action
+""")
+ if not self.is_python_package_installed("mozInstall"):
+ self.fatal("""Can't call install() without mozinstall!
+Did you run with --create-virtualenv? Is mozinstall in virtualenv_modules?""")
+
+ def install_app(self, app=None, target_dir=None, installer_path=None):
+ """ Dependent on mozinstall """
+ # install the application
+ cmd = self.query_exe("mozinstall", default=self.query_python_path("mozinstall"), return_type="list")
+ if app:
+ cmd.extend(['--app', app])
+ # Remove the below when we no longer need to support mozinstall 0.3
+ self.info("Detecting whether we're running mozinstall >=1.0...")
+ output = self.get_output_from_command(cmd + ['-h'])
+ if '--source' in output:
+ cmd.append('--source')
+ # End remove
+ dirs = self.query_abs_dirs()
+ if not target_dir:
+ target_dir = dirs.get('abs_app_install_dir',
+ os.path.join(dirs['abs_work_dir'],
+ 'application'))
+ self.mkdir_p(target_dir)
+ if not installer_path:
+ installer_path = self.installer_path
+ cmd.extend([installer_path,
+ '--destination', target_dir])
+ # TODO we'll need some error checking here
+ return self.get_output_from_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def install(self):
+ self.binary_path = self.install_app(app=self.config.get('application'))
+
+ def uninstall_app(self, install_dir=None):
+ """ Dependent on mozinstall """
+ # uninstall the application
+ cmd = self.query_exe("mozuninstall",
+ default=self.query_python_path("mozuninstall"),
+ return_type="list")
+ dirs = self.query_abs_dirs()
+ if not install_dir:
+ install_dir = dirs.get('abs_app_install_dir',
+ os.path.join(dirs['abs_work_dir'],
+ 'application'))
+ cmd.append(install_dir)
+ # TODO we'll need some error checking here
+ self.get_output_from_command(cmd, halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def uninstall(self):
+ self.uninstall_app()
+
+ def query_minidump_tooltool_manifest(self):
+ if self.config.get('minidump_tooltool_manifest_path'):
+ return self.config['minidump_tooltool_manifest_path']
+
+ self.info('Minidump tooltool manifest unknown. Determining based upon '
+ 'platform and architecture.')
+ platform_name = self.platform_name()
+
+ if platform_name:
+ tooltool_path = "config/tooltool-manifests/%s/releng.manifest" % \
+ TOOLTOOL_PLATFORM_DIR[platform_name]
+ return tooltool_path
+ else:
+ self.fatal('We could not determine the minidump\'s filename.')
+
+ def query_minidump_filename(self):
+ if self.config.get('minidump_stackwalk_path'):
+ return self.config['minidump_stackwalk_path']
+
+ self.info('Minidump filename unknown. Determining based upon platform '
+ 'and architecture.')
+ platform_name = self.platform_name()
+ if platform_name:
+ minidump_filename = '%s-minidump_stackwalk' % TOOLTOOL_PLATFORM_DIR[platform_name]
+ if platform_name in ('win32', 'win64'):
+ minidump_filename += '.exe'
+ return minidump_filename
+ else:
+ self.fatal('We could not determine the minidump\'s filename.')
+
+ def query_nodejs_tooltool_manifest(self):
+ if self.config.get('nodejs_tooltool_manifest_path'):
+ return self.config['nodejs_tooltool_manifest_path']
+
+ self.info('NodeJS tooltool manifest unknown. Determining based upon '
+ 'platform and architecture.')
+ platform_name = self.platform_name()
+
+ if platform_name:
+ tooltool_path = "config/tooltool-manifests/%s/nodejs.manifest" % \
+ TOOLTOOL_PLATFORM_DIR[platform_name]
+ return tooltool_path
+ else:
+ self.fatal('Could not determine nodejs manifest filename')
+
+ def query_nodejs_filename(self):
+ if self.config.get('nodejs_path'):
+ return self.config['nodejs_path']
+
+ self.fatal('Could not determine nodejs filename')
+
+ def query_nodejs(self, manifest=None):
+ if self.nodejs_path:
+ return self.nodejs_path
+
+ c = self.config
+        dirs = self.query_abs_dirs()
+
+ nodejs_path = self.query_nodejs_filename()
+ if not self.config.get('download_nodejs'):
+ self.nodejs_path = nodejs_path
+ return self.nodejs_path
+
+ if not manifest:
+ tooltool_manifest_path = self.query_nodejs_tooltool_manifest()
+ manifest = os.path.join(dirs.get('abs_test_install_dir',
+ os.path.join(dirs['abs_work_dir'], 'tests')),
+ tooltool_manifest_path)
+
+ self.info('grabbing nodejs binary from tooltool')
+ try:
+ self.tooltool_fetch(
+ manifest=manifest,
+ output_dir=dirs['abs_work_dir'],
+ cache=c.get('tooltool_cache')
+ )
+ except KeyError:
+ self.error('missing a required key')
+
+ abs_nodejs_path = os.path.join(dirs['abs_work_dir'], nodejs_path)
+
+ if os.path.exists(abs_nodejs_path):
+ if self.platform_name() not in ('win32', 'win64'):
+ self.chmod(abs_nodejs_path, 0755)
+ self.nodejs_path = abs_nodejs_path
+ else:
+ self.warning("nodejs path was given but couldn't be found. Tried looking in '%s'" % abs_nodejs_path)
+ self.buildbot_status(TBPL_WARNING, WARNING)
+
+ return self.nodejs_path
+
+ def query_minidump_stackwalk(self, manifest=None):
+ if self.minidump_stackwalk_path:
+ return self.minidump_stackwalk_path
+
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ # This is the path where we either download to or is already on the host
+ minidump_stackwalk_path = self.query_minidump_filename()
+
+ if not c.get('download_minidump_stackwalk'):
+ self.minidump_stackwalk_path = minidump_stackwalk_path
+ else:
+ if not manifest:
+ tooltool_manifest_path = self.query_minidump_tooltool_manifest()
+ manifest = os.path.join(dirs.get('abs_test_install_dir',
+ os.path.join(dirs['abs_work_dir'], 'tests')),
+ tooltool_manifest_path)
+
+ self.info('grabbing minidump binary from tooltool')
+ try:
+ self.tooltool_fetch(
+ manifest=manifest,
+ output_dir=dirs['abs_work_dir'],
+ cache=c.get('tooltool_cache')
+ )
+ except KeyError:
+ self.error('missing a required key.')
+
+ abs_minidump_path = os.path.join(dirs['abs_work_dir'],
+ minidump_stackwalk_path)
+ if os.path.exists(abs_minidump_path):
+ self.chmod(abs_minidump_path, 0755)
+ self.minidump_stackwalk_path = abs_minidump_path
+ else:
+ self.warning("minidump stackwalk path was given but couldn't be found. "
+ "Tried looking in '%s'" % abs_minidump_path)
+ # don't burn the job but we should at least turn them orange so it is caught
+ self.buildbot_status(TBPL_WARNING, WARNING)
+
+ return self.minidump_stackwalk_path
+
+ def query_options(self, *args, **kwargs):
+ if "str_format_values" in kwargs:
+ str_format_values = kwargs.pop("str_format_values")
+ else:
+ str_format_values = {}
+
+ arguments = []
+
+ for arg in args:
+ if arg is not None:
+ arguments.extend(argument % str_format_values for argument in arg)
+
+ return arguments
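+
+    # e.g. (sketch) self.query_options(['--app=%(app)s'], None,
+    #                                  str_format_values={'app': 'firefox'})
+    #      -> ['--app=firefox']; None argument groups are skipped.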
+
+ def query_tests_args(self, *args, **kwargs):
+ if "str_format_values" in kwargs:
+ str_format_values = kwargs.pop("str_format_values")
+ else:
+ str_format_values = {}
+
+ arguments = []
+
+ for arg in reversed(args):
+ if arg:
+ arguments.append("--")
+ arguments.extend(argument % str_format_values for argument in arg)
+ break
+
+ return arguments
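+
+    # Sketch of the behaviour implemented above: only the last non-empty
+    # argument group is forwarded, preceded by a literal "--" separator, e.g.
+    #   self.query_tests_args(['-a'], ['-b', '%(key)s'],
+    #                         str_format_values={'key': 'v'})
+    #   -> ['--', '-b', 'v']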
+
+ def _run_cmd_checks(self, suites):
+ if not suites:
+ return
+ dirs = self.query_abs_dirs()
+ for suite in suites:
+            # XXX platform.architecture() may give incorrect values on some
+            # platforms (e.g. mac), as executables may be universal
+            # binaries containing multiple architectures
+ # NOTE 'enabled' is only here while we have unconsolidated configs
+ if not suite['enabled']:
+ continue
+ if suite.get('architectures'):
+ arch = platform.architecture()[0]
+ if arch not in suite['architectures']:
+ continue
+ cmd = suite['cmd']
+ name = suite['name']
+ self.info("Running pre test command %(name)s with '%(cmd)s'"
+ % {'name': name, 'cmd': ' '.join(cmd)})
+ if self.buildbot_config: # this cmd is for buildbot
+                # TODO rather than checking for formatting on every string
+ # in every preflight enabled cmd: find a better solution!
+ # maybe I can implement WithProperties in mozharness?
+ cmd = [x % (self.buildbot_config.get('properties'))
+ for x in cmd]
+ self.run_command(cmd,
+ cwd=dirs['abs_work_dir'],
+ error_list=BaseErrorList,
+ halt_on_failure=suite['halt_on_failure'],
+ fatal_exit_code=suite.get('fatal_exit_code', 3))
+
+ def preflight_run_tests(self):
+ """preflight commands for all tests"""
+ c = self.config
+ if c.get('run_cmd_checks_enabled'):
+ self._run_cmd_checks(c.get('preflight_run_cmd_suites', []))
+ elif c.get('preflight_run_cmd_suites'):
+ self.warning("Proceeding without running prerun test commands."
+ " These are often OS specific and disabling them may"
+ " result in spurious test results!")
+
+ def postflight_run_tests(self):
+ """preflight commands for all tests"""
+ c = self.config
+ if c.get('run_cmd_checks_enabled'):
+ self._run_cmd_checks(c.get('postflight_run_cmd_suites', []))
diff --git a/testing/mozharness/mozharness/mozilla/testing/try_tools.py b/testing/mozharness/mozharness/mozilla/testing/try_tools.py
new file mode 100644
index 000000000..3708e71db
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/try_tools.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import argparse
+import os
+import re
+from collections import defaultdict
+
+from mozharness.base.script import PostScriptAction
+from mozharness.base.transfer import TransferMixin
+
+try_config_options = [
+ [["--try-message"],
+ {"action": "store",
+ "dest": "try_message",
+ "default": None,
+ "help": "try syntax string to select tests to run",
+ }],
+]
+
+test_flavors = {
+ 'browser-chrome': {},
+ 'chrome': {},
+ 'devtools-chrome': {},
+ 'mochitest': {},
+    'xpcshell': {},
+ 'reftest': {
+ "path": lambda x: os.path.join("tests", "reftest", "tests", x)
+ },
+ 'crashtest': {
+ "path": lambda x: os.path.join("tests", "reftest", "tests", x)
+ },
+ 'web-platform-tests': {
+ "path": lambda x: os.path.join("tests", x.split("testing" + os.path.sep)[1])
+ }
+}
+
+class TryToolsMixin(TransferMixin):
+ """Utility functions for an interface between try syntax and out test harnesses.
+ Requires log and script mixins."""
+
+ harness_extra_args = None
+ try_test_paths = {}
+ known_try_arguments = {
+ '--tag': ({
+ 'action': 'append',
+ 'dest': 'tags',
+ 'default': None,
+ }, (
+ 'browser-chrome',
+ 'chrome',
+ 'devtools-chrome',
+ 'marionette',
+ 'mochitest',
+            'web-platform-tests',
+ 'xpcshell',
+ )),
+ '--setenv': ({
+ 'action': 'append',
+ 'dest': 'setenv',
+ 'default': [],
+ 'metavar': 'NAME=VALUE',
+ }, (
+ 'browser-chrome',
+ 'chrome',
+ 'crashtest',
+ 'devtools-chrome',
+ 'mochitest',
+ 'reftest',
+ )),
+ }
+
+ def _extract_try_message(self):
+ msg = None
+ buildbot_config = self.buildbot_config or {}
+ if "try_message" in self.config and self.config["try_message"]:
+ msg = self.config["try_message"]
+ elif 'TRY_COMMIT_MSG' in os.environ:
+ msg = os.environ['TRY_COMMIT_MSG']
+ elif self._is_try():
+ if 'sourcestamp' in buildbot_config and buildbot_config['sourcestamp'].get('changes'):
+ msg = buildbot_config['sourcestamp']['changes'][-1].get('comments')
+
+ if msg is None or len(msg) == 1024:
+ # This commit message was potentially truncated or not available in
+ # buildbot_config (e.g. if running in TaskCluster), get the full message
+ # from hg.
+ props = buildbot_config.get('properties', {})
+ repo_url = 'https://hg.mozilla.org/%s/'
+ if 'revision' in props and 'repo_path' in props:
+ rev = props['revision']
+ repo_path = props['repo_path']
+ else:
+ # In TaskCluster we have no buildbot props, rely on env vars instead
+ rev = os.environ.get('GECKO_HEAD_REV')
+ repo_path = self.config.get('branch')
+ if repo_path:
+ repo_url = repo_url % repo_path
+ else:
+ repo_url = os.environ.get('GECKO_HEAD_REPOSITORY',
+ repo_url % 'try')
+ if not repo_url.endswith('/'):
+ repo_url += '/'
+
+ url = '{}json-pushes?changeset={}&full=1'.format(repo_url, rev)
+
+ pushinfo = self.load_json_from_url(url)
+ for k, v in pushinfo.items():
+ if isinstance(v, dict) and 'changesets' in v:
+ msg = v['changesets'][-1]['desc']
+
+ if not msg and 'try_syntax' in buildbot_config.get('properties', {}):
+ # If we don't find try syntax in the usual place, check for it in an
+ # alternate property available to tools using self-serve.
+ msg = buildbot_config['properties']['try_syntax']
+ if not msg:
+ self.warning('Try message not found.')
+ return msg
+
+ def _extract_try_args(self, msg):
+ """ Returns a list of args from a try message, for parsing """
+ if not msg:
+ return None
+ all_try_args = None
+ for line in msg.splitlines():
+ if 'try: ' in line:
+ # Autoland adds quotes to try strings that will confuse our
+ # args later on.
+ if line.startswith('"') and line.endswith('"'):
+ line = line[1:-1]
+ # Allow spaces inside of [filter expressions]
+ try_message = line.strip().split('try: ', 1)
+ all_try_args = re.findall(r'(?:\[.*?\]|\S)+', try_message[1])
+ break
+ if not all_try_args:
+            self.warning('Try syntax not found in: %s.' % msg)
+ return all_try_args
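+
+    # Illustrative example: for a commit message line
+    #   'try: -b o -p linux -u mochitest-1[Ubuntu 12.04]'
+    # the bracket-aware regex above yields
+    #   ['-b', 'o', '-p', 'linux', '-u', 'mochitest-1[Ubuntu 12.04]']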
+
+ def try_message_has_flag(self, flag, message=None):
+ """
+ Returns True if --`flag` is present in message.
+ """
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--' + flag, action='store_true')
+ message = message or self._extract_try_message()
+ if not message:
+ return False
+ msg_list = self._extract_try_args(message)
+ args, _ = parser.parse_known_args(msg_list)
+ return getattr(args, flag, False)
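+
+    # e.g. (sketch) self.try_message_has_flag(
+    #          'artifact', 'try: -b o -p linux --artifact') -> True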
+
+ def _is_try(self):
+ repo_path = None
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ repo_path = self.buildbot_config['properties'].get('branch')
+ return (self.config.get('branch', repo_path) == 'try' or
+ 'TRY_COMMIT_MSG' in os.environ)
+
+ @PostScriptAction('download-and-extract')
+ def set_extra_try_arguments(self, action, success=None):
+ """Finds a commit message and parses it for extra arguments to pass to the test
+ harness command line and test paths used to filter manifests.
+
+ Extracting arguments from a commit message taken directly from the try_parser.
+ """
+ if not self._is_try():
+ return
+
+ msg = self._extract_try_message()
+ if not msg:
+ return
+
+ all_try_args = self._extract_try_args(msg)
+ if not all_try_args:
+ return
+
+ parser = argparse.ArgumentParser(
+ description=('Parse an additional subset of arguments passed to try syntax'
+ ' and forward them to the underlying test harness command.'))
+
+ label_dict = {}
+ def label_from_val(val):
+ if val in label_dict:
+ return label_dict[val]
+ return '--%s' % val.replace('_', '-')
+
+ for label, (opts, _) in self.known_try_arguments.iteritems():
+ if 'action' in opts and opts['action'] not in ('append', 'store',
+ 'store_true', 'store_false'):
+ self.fatal('Try syntax does not support passing custom or store_const '
+ 'arguments to the harness process.')
+ if 'dest' in opts:
+ label_dict[opts['dest']] = label
+
+ parser.add_argument(label, **opts)
+
+ parser.add_argument('--try-test-paths', nargs='*')
+ (args, _) = parser.parse_known_args(all_try_args)
+ self.try_test_paths = self._group_test_paths(args.try_test_paths)
+ del args.try_test_paths
+
+ out_args = defaultdict(list)
+ # This is a pretty hacky way to echo arguments down to the harness.
+ # Hopefully this can be improved once we have a configuration system
+ # in tree for harnesses that relies less on a command line.
+ for arg, value in vars(args).iteritems():
+ if value:
+ label = label_from_val(arg)
+ _, flavors = self.known_try_arguments[label]
+
+ for f in flavors:
+ if isinstance(value, bool):
+ # A store_true or store_false argument.
+ out_args[f].append(label)
+ elif isinstance(value, list):
+ out_args[f].extend(['%s=%s' % (label, el) for el in value])
+ else:
+ out_args[f].append('%s=%s' % (label, value))
+
+ self.harness_extra_args = dict(out_args)
+
+ def _group_test_paths(self, args):
+ rv = defaultdict(list)
+
+ if args is None:
+ return rv
+
+ for item in args:
+ suite, path = item.split(":", 1)
+ rv[suite].append(path)
+ return rv
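+
+    # e.g. self._group_test_paths(['mochitest:dom/base',
+    #                              'mochitest:dom/events'])
+    #      -> {'mochitest': ['dom/base', 'dom/events']}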
+
+ def try_args(self, flavor):
+ """Get arguments, test_list derived from try syntax to apply to a command"""
+ args = []
+ if self.harness_extra_args:
+ args = self.harness_extra_args.get(flavor, [])[:]
+
+ if self.try_test_paths.get(flavor):
+ self.info('TinderboxPrint: Tests will be run from the following '
+ 'files: %s.' % ','.join(self.try_test_paths[flavor]))
+ args.extend(['--this-chunk=1', '--total-chunks=1'])
+
+            path_func = test_flavors[flavor].get("path", lambda x: x)
+ tests = [path_func(item) for item in self.try_test_paths[flavor]]
+ else:
+ tests = []
+
+ if args or tests:
+ self.info('TinderboxPrint: The following arguments were forwarded from mozharness '
+ 'to the test command:\nTinderboxPrint: \t%s -- %s' %
+ (" ".join(args), " ".join(tests)))
+
+ return args, tests
diff --git a/testing/mozharness/mozharness/mozilla/testing/unittest.py b/testing/mozharness/mozharness/mozilla/testing/unittest.py
new file mode 100755
index 000000000..d935ff699
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/testing/unittest.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import re
+
+from mozharness.mozilla.testing.errors import TinderBoxPrintRe
+from mozharness.base.log import OutputParser, WARNING, INFO, CRITICAL, ERROR
+from mozharness.mozilla.buildbot import TBPL_WARNING, TBPL_FAILURE, TBPL_RETRY
+from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WORST_LEVEL_TUPLE
+
+SUITE_CATEGORIES = ['mochitest', 'reftest', 'xpcshell']
+
+
+def tbox_print_summary(pass_count, fail_count, known_fail_count=None,
+ crashed=False, leaked=False):
+ emphasize_fail_text = '<em class="testfail">%s</em>'
+
+ if pass_count < 0 or fail_count < 0 or \
+ (known_fail_count is not None and known_fail_count < 0):
+ summary = emphasize_fail_text % 'T-FAIL'
+ elif pass_count == 0 and fail_count == 0 and \
+ (known_fail_count == 0 or known_fail_count is None):
+ summary = emphasize_fail_text % 'T-FAIL'
+ else:
+ str_fail_count = str(fail_count)
+ if fail_count > 0:
+ str_fail_count = emphasize_fail_text % str_fail_count
+ summary = "%d/%s" % (pass_count, str_fail_count)
+ if known_fail_count is not None:
+ summary += "/%d" % known_fail_count
+ # Format the crash status.
+ if crashed:
+ summary += "&nbsp;%s" % emphasize_fail_text % "CRASH"
+ # Format the leak status.
+ if leaked is not False:
+ summary += "&nbsp;%s" % emphasize_fail_text % (
+ (leaked and "LEAK") or "L-FAIL")
+ return summary
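+
+# For example (counts are illustrative), tbox_print_summary(3141, 2, 5)
+# yields '3141/<em class="testfail">2</em>/5'; a crash appends
+# '&nbsp;<em class="testfail">CRASH</em>' to the summary.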
+
+
+class TestSummaryOutputParserHelper(OutputParser):
+ def __init__(self, regex=re.compile(r'(passed|failed|todo): (\d+)'), **kwargs):
+ self.regex = regex
+ self.failed = 0
+ self.passed = 0
+ self.todo = 0
+ self.last_line = None
+ self.tbpl_status = TBPL_SUCCESS
+ self.worst_log_level = INFO
+ super(TestSummaryOutputParserHelper, self).__init__(**kwargs)
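+
+ # The default regex matches harness summary lines such as
+ # "passed: 3141", "failed: 2" or "todo: 5" (counts here are
+ # illustrative).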
+
+ def parse_single_line(self, line):
+ super(TestSummaryOutputParserHelper, self).parse_single_line(line)
+ self.last_line = line
+ m = self.regex.search(line)
+ if m:
+ try:
+ setattr(self, m.group(1), int(m.group(2)))
+ except ValueError:
+ # ignore bad values
+ pass
+
+ def evaluate_parser(self, return_code, success_codes=None):
+ if return_code == 0 and self.passed > 0 and self.failed == 0:
+ self.tbpl_status = TBPL_SUCCESS
+ elif return_code == 10 and self.failed > 0:
+ self.tbpl_status = TBPL_WARNING
+ else:
+ self.tbpl_status = TBPL_FAILURE
+ self.worst_log_level = ERROR
+
+ return (self.tbpl_status, self.worst_log_level)
+
+ def print_summary(self, suite_name):
+ # generate the TinderboxPrint line for TBPL
+ emphasize_fail_text = '<em class="testfail">%s</em>'
+ failed = "0"
+ if self.passed == 0 and self.failed == 0:
+ self.tsummary = emphasize_fail_text % "T-FAIL"
+ else:
+ if self.failed > 0:
+ failed = emphasize_fail_text % str(self.failed)
+ self.tsummary = "%d/%s/%d" % (self.passed, failed, self.todo)
+
+ self.info("TinderboxPrint: %s<br/>%s\n" % (suite_name, self.tsummary))
+
+ def append_tinderboxprint_line(self, suite_name):
+ self.print_summary(suite_name)
+
+
+class DesktopUnittestOutputParser(OutputParser):
+ """
+ A class that extends OutputParser such that it can parse the number of
+ passed/failed/todo tests from the output.
+ """
+
+ def __init__(self, suite_category, **kwargs):
+ # worst_log_level is already defined in the base OutputParser,
+ # but is set here to make pylint happy
+ self.worst_log_level = INFO
+ super(DesktopUnittestOutputParser, self).__init__(**kwargs)
+ self.summary_suite_re = TinderBoxPrintRe.get('%s_summary' % suite_category, {})
+ self.harness_error_re = TinderBoxPrintRe['harness_error']['minimum_regex']
+ self.full_harness_error_re = TinderBoxPrintRe['harness_error']['full_regex']
+ self.harness_retry_re = TinderBoxPrintRe['harness_error']['retry_regex']
+ self.fail_count = -1
+ self.pass_count = -1
+ # known_fail_count does not exist for some suites
+ self.known_fail_count = self.summary_suite_re.get('known_fail_group') and -1
+ self.crashed, self.leaked = False, False
+ self.tbpl_status = TBPL_SUCCESS
+
+ def parse_single_line(self, line):
+ if self.summary_suite_re:
+ summary_m = self.summary_suite_re['regex'].match(line) # pass/fail/todo
+ if summary_m:
+ message = ' %s' % line
+ log_level = INFO
+ # remove all the none values in groups() so this will work
+ # with all suites including mochitest browser-chrome
+ summary_match_list = [group for group in summary_m.groups()
+ if group is not None]
+ r = summary_match_list[0]
+ if self.summary_suite_re['pass_group'] in r:
+ if len(summary_match_list) > 1:
+ self.pass_count = int(summary_match_list[-1])
+ else:
+ # This handles suites that either pass or report
+ # number of failures. We need to set both
+ # pass and fail count in the pass case.
+ self.pass_count = 1
+ self.fail_count = 0
+ elif self.summary_suite_re['fail_group'] in r:
+ self.fail_count = int(summary_match_list[-1])
+ if self.fail_count > 0:
+ message += '\n One or more unittests failed.'
+ log_level = WARNING
+ # If self.summary_suite_re['known_fail_group'] is None,
+ # then r should not match it, so this test is fine as is.
+ elif self.summary_suite_re['known_fail_group'] in r:
+ self.known_fail_count = int(summary_match_list[-1])
+ self.log(message, log_level)
+ return # skip harness check and base parse_single_line
+ harness_match = self.harness_error_re.match(line)
+ if harness_match:
+ self.warning(' %s' % line)
+ self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
+ self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+ full_harness_match = self.full_harness_error_re.match(line)
+ if full_harness_match:
+ r = full_harness_match.group(1)
+ if r == "application crashed":
+ self.crashed = True
+ elif r == "missing output line for total leaks!":
+ self.leaked = None
+ else:
+ self.leaked = True
+ return # skip base parse_single_line
+ if self.harness_retry_re.search(line):
+ self.critical(' %s' % line)
+ self.worst_log_level = self.worst_level(CRITICAL, self.worst_log_level)
+ self.tbpl_status = self.worst_level(TBPL_RETRY, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+ return # skip base parse_single_line
+ super(DesktopUnittestOutputParser, self).parse_single_line(line)
+
+ def evaluate_parser(self, return_code, success_codes=None):
+ success_codes = success_codes or [0]
+
+ if self.num_errors: # mozharness ran into a script error
+ self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ # I have to put this outside of parse_single_line because this checks not
+ # only if fail_count was more than 0 but also if fail_count is still -1
+ # (no fail summary line was found)
+ if self.fail_count != 0:
+ self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
+ self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ # Account for the possibility that no test summary was output.
+ if self.pass_count <= 0 and self.fail_count <= 0 and \
+ (self.known_fail_count is None or self.known_fail_count <= 0):
+ self.error('No tests run or test summary not found')
+ self.worst_log_level = self.worst_level(WARNING,
+ self.worst_log_level)
+ self.tbpl_status = self.worst_level(TBPL_WARNING,
+ self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ if return_code not in success_codes:
+ self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
+ levels=TBPL_WORST_LEVEL_TUPLE)
+
+ # we can trust in parser.worst_log_level in either case
+ return (self.tbpl_status, self.worst_log_level)
+
+ def append_tinderboxprint_line(self, suite_name):
+ # We are duplicating a condition (fail_count) from evaluate_parser and
+ # parse_single_line but at little cost since we are not parsing
+ # the log more than once. I figured this method should stay isolated as
+ # it is only here for tbpl highlighted summaries and is not part of
+ # buildbot evaluation or result status.
+ summary = tbox_print_summary(self.pass_count,
+ self.fail_count,
+ self.known_fail_count,
+ self.crashed,
+ self.leaked)
+ self.info("TinderboxPrint: %s<br/>%s\n" % (suite_name, summary))
+
+
+class EmulatorMixin(object):
+ """ Currently dependent on both TooltoolMixin and TestingMixin)"""
+
+ def install_emulator_from_tooltool(self, manifest_path, do_unzip=True):
+ dirs = self.query_abs_dirs()
+ if self.tooltool_fetch(manifest_path, output_dir=dirs['abs_work_dir'],
+ cache=self.config.get("tooltool_cache", None)):
+ self.fatal("Unable to download emulator via tooltool!")
+ if do_unzip:
+ unzip = self.query_exe("unzip")
+ unzip_cmd = [unzip, '-q', os.path.join(dirs['abs_work_dir'], "emulator.zip")]
+ self.run_command(unzip_cmd, cwd=dirs['abs_emulator_dir'], halt_on_failure=True,
+ fatal_exit_code=3)
+
+ def install_emulator(self):
+ dirs = self.query_abs_dirs()
+ self.mkdir_p(dirs['abs_emulator_dir'])
+ if self.config.get('emulator_url'):
+ self.download_unpack(self.config['emulator_url'], dirs['abs_emulator_dir'])
+ elif self.config.get('emulator_manifest'):
+ manifest_path = self.create_tooltool_manifest(self.config['emulator_manifest'])
+ do_unzip = True
+ if 'unpack' in self.config['emulator_manifest']:
+ do_unzip = False
+ self.install_emulator_from_tooltool(manifest_path, do_unzip)
+ elif self.buildbot_config:
+ props = self.buildbot_config.get('properties')
+ url = 'https://hg.mozilla.org/%s/raw-file/%s/b2g/test/emulator.manifest' % (
+ props['repo_path'], props['revision'])
+ manifest_path = self.download_file(url,
+ file_name='tooltool.tt',
+ parent_dir=dirs['abs_work_dir'])
+ if not manifest_path:
+ self.fatal("Can't download emulator manifest from %s" % url)
+ self.install_emulator_from_tooltool(manifest_path)
+ else:
+ self.fatal("Can't get emulator; set emulator_url or emulator_manifest in the config!")
+ if self.config.get('tools_manifest'):
+ manifest_path = self.create_tooltool_manifest(self.config['tools_manifest'])
+ do_unzip = True
+ if 'unpack' in self.config['tools_manifest']:
+ do_unzip = False
+ self.install_emulator_from_tooltool(manifest_path, do_unzip)
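+
+ # install_emulator() is config-driven; minimal sketches of the two
+ # supported shapes (the URL and manifest contents are assumptions):
+ # config = {'emulator_url': 'https://example.com/emulator.zip'}
+ # config = {'emulator_manifest': '<tooltool manifest contents>'}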
diff --git a/testing/mozharness/mozharness/mozilla/tooltool.py b/testing/mozharness/mozharness/mozilla/tooltool.py
new file mode 100644
index 000000000..0bd98e0a2
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/tooltool.py
@@ -0,0 +1,129 @@
+"""module for tooltool operations"""
+import os
+import sys
+
+from mozharness.base.errors import PythonErrorList
+from mozharness.base.log import ERROR, FATAL
+from mozharness.mozilla.proxxy import Proxxy
+
+TooltoolErrorList = PythonErrorList + [{
+ 'substr': 'ERROR - ', 'level': ERROR
+}]
+
+
+TOOLTOOL_PY_URL = \
+ "https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py"
+
+TOOLTOOL_SERVERS = [
+ 'https://api.pub.build.mozilla.org/tooltool/',
+]
+
+
+class TooltoolMixin(object):
+ """Mixin class for handling tooltool manifests.
+ To use a tooltool server other than the Mozilla server, override
+ config['tooltool_servers']. To specify a different authentication
+ file than that used in releng automation, override
+ config['tooltool_authentication_file']; set it to None to not pass
+ any authentication information (OK for public files).
+ """
+ def _get_auth_file(self):
+ # set the default authentication file based on platform; this
+ # corresponds to where puppet puts the token
+ if 'tooltool_authentication_file' in self.config:
+ fn = self.config['tooltool_authentication_file']
+ elif self._is_windows():
+ fn = r'c:\builds\relengapi.tok'
+ else:
+ fn = '/builds/relengapi.tok'
+
+ # if the file doesn't exist, don't pass it to tooltool (it will just
+ # fail). In taskcluster, this will work OK as the relengapi-proxy will
+ # take care of auth. Everywhere else, we'll get auth failures if
+ # necessary.
+ if os.path.exists(fn):
+ return fn
+
+ def tooltool_fetch(self, manifest,
+ output_dir=None, privileged=False, cache=None):
+ """docstring for tooltool_fetch"""
+ # Use vendored tooltool.py if available.
+ if self.topsrcdir:
+ cmd = [
+ sys.executable,
+ os.path.join(self.topsrcdir, 'testing', 'docker', 'recipes',
+ 'tooltool.py')
+ ]
+ elif self.config.get("download_tooltool"):
+ cmd = [sys.executable, self._fetch_tooltool_py()]
+ else:
+ cmd = self.query_exe('tooltool.py', return_type='list')
+
+ # get the tooltool servers from configuration
+ default_urls = self.config.get('tooltool_servers', TOOLTOOL_SERVERS)
+
+ # add slashes (bug 1155630)
+ def add_slash(url):
+ return url if url.endswith('/') else (url + '/')
+ default_urls = [add_slash(u) for u in default_urls]
+
+ # proxxy-ify
+ proxxy = Proxxy(self.config, self.log_obj)
+ proxxy_urls = proxxy.get_proxies_and_urls(default_urls)
+
+ for proxied_url in proxxy_urls:
+ cmd.extend(['--url', proxied_url])
+
+ # handle authentication file, if given
+ auth_file = self._get_auth_file()
+ if auth_file and os.path.exists(auth_file):
+ cmd.extend(['--authentication-file', auth_file])
+
+ cmd.extend(['fetch', '-m', manifest, '-o'])
+
+ if cache:
+ cmd.extend(['-c', cache])
+
+ # when mock is enabled run tooltool in mock. We can't use
+ # run_command_m in all cases because it won't exist unless
+ # MockMixin is used on the parent class
+ if self.config.get('mock_target'):
+ cmd_runner = self.run_command_m
+ else:
+ cmd_runner = self.run_command
+
+ timeout = self.config.get('tooltool_timeout', 10 * 60)
+
+ self.retry(
+ cmd_runner,
+ args=(cmd, ),
+ kwargs={'cwd': output_dir,
+ 'error_list': TooltoolErrorList,
+ 'privileged': privileged,
+ 'output_timeout': timeout,
+ },
+ good_statuses=(0, ),
+ error_message="Tooltool %s fetch failed!" % manifest,
+ error_level=FATAL,
+ )
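+
+ # Hypothetical call, assuming a manifest in the work dir and a shared
+ # cache directory:
+ # self.tooltool_fetch('releng.manifest',
+ # output_dir=dirs['abs_work_dir'],
+ # cache='/builds/tooltool_cache')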
+
+ def _fetch_tooltool_py(self):
+ """ Retrieve tooltool.py
+ """
+ dirs = self.query_abs_dirs()
+ file_path = os.path.join(dirs['abs_work_dir'], "tooltool.py")
+ self.download_file(TOOLTOOL_PY_URL, file_path)
+ if not os.path.exists(file_path):
+ self.fatal("We can't get tooltool.py")
+ self.chmod(file_path, 0755)
+ return file_path
+
+ def create_tooltool_manifest(self, contents, path=None):
+ """ Currently just creates a manifest, given the contents.
+ We may want a template and individual values in the future?
+ """
+ if path is None:
+ dirs = self.query_abs_dirs()
+ path = os.path.join(dirs['abs_work_dir'], 'tooltool.tt')
+ self.write_to_file(path, contents, error_level=FATAL)
+ return path
diff --git a/testing/mozharness/mozharness/mozilla/updates/__init__.py b/testing/mozharness/mozharness/mozilla/updates/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/updates/__init__.py
diff --git a/testing/mozharness/mozharness/mozilla/updates/balrog.py b/testing/mozharness/mozharness/mozilla/updates/balrog.py
new file mode 100644
index 000000000..26253283c
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/updates/balrog.py
@@ -0,0 +1,149 @@
+from itertools import chain
+import os
+
+from mozharness.base.log import INFO
+
+
+# BalrogMixin {{{1
+class BalrogMixin(object):
+ @staticmethod
+ def _query_balrog_username(server_config, product=None):
+ username = server_config["balrog_usernames"].get(product)
+ if username:
+ return username
+ else:
+ raise KeyError("Couldn't find balrog username.")
+
+ def generate_balrog_props(self, props_path):
+ self.set_buildbot_property(
+ "hashType", self.config.get("hash_type", "sha512"), write_to_file=True
+ )
+
+ if self.buildbot_config and "properties" in self.buildbot_config:
+ buildbot_properties = self.buildbot_config["properties"].items()
+ else:
+ buildbot_properties = []
+
+ balrog_props = dict(properties=dict(chain(
+ buildbot_properties,
+ self.buildbot_properties.items(),
+ )))
+ if self.config.get('balrog_platform'):
+ balrog_props["properties"]["platform"] = self.config['balrog_platform']
+ if "branch" not in balrog_props["properties"]:
+ balrog_props["properties"]["branch"] = self.branch
+
+ self.dump_config(props_path, balrog_props)
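+
+ # The dumped props file is a JSON blob shaped roughly like
+ # {"properties": {"hashType": ..., "platform": ..., "branch": ...}}
+ # (keys shown here are illustrative).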
+
+ def submit_balrog_updates(self, release_type="nightly", product=None):
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ if self.buildbot_config and "properties" in self.buildbot_config:
+ product = self.buildbot_config["properties"]["product"]
+
+ if product is None:
+ self.fatal('There is no valid product information.')
+
+ props_path = os.path.join(dirs["base_work_dir"], "balrog_props.json")
+ credentials_file = os.path.join(
+ dirs["base_work_dir"], c["balrog_credentials_file"]
+ )
+ submitter_script = os.path.join(
+ dirs["abs_tools_dir"], "scripts", "updates", "balrog-submitter.py"
+ )
+
+ self.generate_balrog_props(props_path)
+
+ cmd = [
+ self.query_exe("python"),
+ submitter_script,
+ "--build-properties", props_path,
+ "-t", release_type,
+ "--credentials-file", credentials_file,
+ ]
+ if self._log_level_at_least(INFO):
+ cmd.append("--verbose")
+
+ return_codes = []
+ for server in c["balrog_servers"]:
+ server_args = [
+ "--api-root", server["balrog_api_root"],
+ "--username", self._query_balrog_username(server, product)
+ ]
+ if server.get("url_replacements"):
+ for replacement in server["url_replacements"]:
+ server_args.append("--url-replacement")
+ server_args.append(",".join(replacement))
+
+ self.info("Calling Balrog submission script")
+ return_code = self.retry(
+ self.run_command, attempts=5, args=(cmd + server_args,),
+ good_statuses=(0,),
+ )
+ if server["ignore_failures"]:
+ self.info("Ignoring result, ignore_failures set to True")
+ else:
+ return_codes.append(return_code)
+ # return the worst (max) code
+ return max(return_codes)
+
+ def submit_balrog_release_pusher(self, dirs):
+ product = self.buildbot_config["properties"]["product"]
+ cmd = [self.query_exe("python"), os.path.join(os.path.join(dirs['abs_tools_dir'], "scripts/updates/balrog-release-pusher.py"))]
+ cmd.extend(["--build-properties", os.path.join(dirs["base_work_dir"], "balrog_props.json")])
+ cmd.extend(["--buildbot-configs", "https://hg.mozilla.org/build/buildbot-configs"])
+ cmd.extend(["--release-config", os.path.join(dirs['build_dir'], self.config.get("release_config_file"))])
+ cmd.extend(["--credentials-file", os.path.join(dirs['base_work_dir'], self.config.get("balrog_credentials_file"))])
+ cmd.extend(["--release-channel", self.query_release_config()['release_channel']])
+
+ return_codes = []
+ for server in self.config["balrog_servers"]:
+
+ server_args = [
+ "--api-root", server["balrog_api_root"],
+ "--username", self._query_balrog_username(server, product)
+ ]
+
+ self.info("Calling Balrog release pusher script")
+ return_code = self.retry(
+ self.run_command, args=(cmd + server_args,),
+ kwargs={'cwd': dirs['abs_work_dir']},
+ good_statuses=(0,),
+ )
+ if server["ignore_failures"]:
+ self.info("Ignoring result, ignore_failures set to True")
+ else:
+ return_codes.append(return_code)
+ # return the worst (max) code
+ return max(return_codes)
+
+ def lock_balrog_rules(self, rule_ids):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ submitter_script = os.path.join(
+ dirs["abs_tools_dir"], "scripts", "updates",
+ "balrog-nightly-locker.py"
+ )
+ credentials_file = os.path.join(
+ dirs["base_work_dir"], c["balrog_credentials_file"]
+ )
+
+ cmd = [
+ self.query_exe("python"),
+ submitter_script,
+ "--credentials-file", credentials_file,
+ "--api-root", c["balrog_api_root"],
+ "--username", c["balrog_username"],
+ ]
+ for r in rule_ids:
+ cmd.extend(["-r", str(r)])
+
+ if self._log_level_at_least(INFO):
+ cmd.append("--verbose")
+
+ cmd.append("lock")
+
+ self.info("Calling Balrog rule locking script.")
+ self.retry(self.run_command, attempts=5, args=cmd,
+ kwargs={"halt_on_failure": True})
diff --git a/testing/mozharness/mozharness/mozilla/vcstools.py b/testing/mozharness/mozharness/mozilla/vcstools.py
new file mode 100644
index 000000000..b73a4767d
--- /dev/null
+++ b/testing/mozharness/mozharness/mozilla/vcstools.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""vcstools.py
+
+Author: Armen Zambrano G.
+"""
+import os
+
+from mozharness.base.script import PreScriptAction
+from mozharness.base.vcs.vcsbase import VCSScript
+
+VCS_TOOLS = ('gittool.py',)
+
+
+class VCSToolsScript(VCSScript):
+ ''' This script allows us to fetch gittool.py if
+ we're running the script in developer mode.
+ '''
+ @PreScriptAction('checkout')
+ def _pre_checkout(self, action):
+ if self.config.get('developer_mode'):
+ # We put them in base_work_dir to prevent the clobber action
+ # from deleting them before we use them
+ for vcs_tool in VCS_TOOLS:
+ file_path = self.query_exe(vcs_tool)
+ if not os.path.exists(file_path):
+ self.download_file(
+ url=self.config[vcs_tool],
+ file_name=file_path,
+ parent_dir=os.path.dirname(file_path),
+ create_parent_dir=True,
+ )
+ self.chmod(file_path, 0755)
+ else:
+ # We simply verify that everything is in order
+ # or if the user forgot to specify developer mode
+ for vcs_tool in VCS_TOOLS:
+ file_path = self.which(vcs_tool)
+
+ if not file_path:
+ file_path = self.query_exe(vcs_tool)
+
+ # If the tool is specified as a list, we're running on Windows,
+ # so we won't check
+ if type(self.query_exe(vcs_tool)) is list:
+ continue
+
+ if file_path is None:
+ self.fatal("This machine is missing %s, if this is your "
+ "local machine you can use --cfg "
+ "developer_config.py" % vcs_tool)
+ elif not self.is_exe(file_path):
+ self.critical("%s is not executable." % file_path)
diff --git a/testing/mozharness/mozinfo/__init__.py b/testing/mozharness/mozinfo/__init__.py
new file mode 100644
index 000000000..904dfef71
--- /dev/null
+++ b/testing/mozharness/mozinfo/__init__.py
@@ -0,0 +1,56 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+interface to transform introspected system information to a format palatable to
+Mozilla
+
+Module variables:
+
+.. attribute:: bits
+
+ 32 or 64
+
+.. attribute:: isBsd
+
+ Returns ``True`` if the operating system is BSD
+
+.. attribute:: isLinux
+
+ Returns ``True`` if the operating system is Linux
+
+.. attribute:: isMac
+
+ Returns ``True`` if the operating system is Mac
+
+.. attribute:: isWin
+
+ Returns ``True`` if the operating system is Windows
+
+.. attribute:: os
+
+ Operating system [``'win'``, ``'mac'``, ``'linux'``, ...]
+
+.. attribute:: processor
+
+ Processor architecture [``'x86'``, ``'x86_64'``, ``'ppc'``, ...]
+
+.. attribute:: version
+
+ Operating system version string. For Windows, the service pack information is also included.
+
+.. attribute:: info
+
+ Returns information identifying the current system.
+
+ * :attr:`bits`
+ * :attr:`os`
+ * :attr:`processor`
+ * :attr:`version`
+
+"""
+
+import mozinfo
+from mozinfo import *
+__all__ = mozinfo.__all__
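+
+# Illustrative usage of the re-exported attributes:
+# import mozinfo
+# if mozinfo.isLinux and mozinfo.bits == 64:
+# print "64-bit Linux, version: %s" % mozinfo.version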
diff --git a/testing/mozharness/mozinfo/mozinfo.py b/testing/mozharness/mozinfo/mozinfo.py
new file mode 100755
index 000000000..718e1a9d7
--- /dev/null
+++ b/testing/mozharness/mozinfo/mozinfo.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# TODO: it might be a good idea to add a system name (e.g. 'Ubuntu' for
+# linux) to the information; I certainly wouldn't want anyone parsing this
+# information and having behaviour depend on it
+
+import json
+import os
+import platform
+import re
+import sys
+
+import mozfile
+
+# keep a copy of the os module since updating globals overrides this
+_os = os
+
+class unknown(object):
+ """marker class for unknown information"""
+ def __nonzero__(self):
+ return False
+ def __str__(self):
+ return 'UNKNOWN'
+unknown = unknown() # singleton
+
+# get system information
+info = {'os': unknown,
+ 'processor': unknown,
+ 'version': unknown,
+ 'bits': unknown }
+(system, node, release, version, machine, processor) = platform.uname()
+(bits, linkage) = platform.architecture()
+
+# get os information and related data
+if system in ["Microsoft", "Windows"]:
+ info['os'] = 'win'
+ # There is a Python bug on Windows to determine platform values
+ # http://bugs.python.org/issue7860
+ if "PROCESSOR_ARCHITEW6432" in os.environ:
+ processor = os.environ.get("PROCESSOR_ARCHITEW6432", processor)
+ else:
+ processor = os.environ.get('PROCESSOR_ARCHITECTURE', processor)
+ system = os.environ.get("OS", system).replace('_', ' ')
+ service_pack = os.sys.getwindowsversion()[4]
+ info['service_pack'] = service_pack
+elif system == "Linux":
+ if hasattr(platform, "linux_distribution"):
+ (distro, version, codename) = platform.linux_distribution()
+ else:
+ (distro, version, codename) = platform.dist()
+ version = "%s %s" % (distro, version)
+ if not processor:
+ processor = machine
+ info['os'] = 'linux'
+elif system in ['DragonFly', 'FreeBSD', 'NetBSD', 'OpenBSD']:
+ info['os'] = 'bsd'
+ version = sys.platform
+elif system == "Darwin":
+ (release, versioninfo, machine) = platform.mac_ver()
+ version = "OS X %s" % release
+ info['os'] = 'mac'
+elif sys.platform in ('solaris', 'sunos5'):
+ info['os'] = 'unix'
+ version = sys.platform
+info['version'] = version # os version
+
+# processor type and bits
+if processor in ["i386", "i686"]:
+ if bits == "32bit":
+ processor = "x86"
+ elif bits == "64bit":
+ processor = "x86_64"
+elif processor.upper() == "AMD64":
+ bits = "64bit"
+ processor = "x86_64"
+elif processor == "Power Macintosh":
+ processor = "ppc"
+bits = re.search(r'(\d+)bit', bits).group(1)
+info.update({'processor': processor,
+ 'bits': int(bits),
+ })
+
+# standard value of choices, for easy inspection
+choices = {'os': ['linux', 'bsd', 'win', 'mac', 'unix'],
+ 'bits': [32, 64],
+ 'processor': ['x86', 'x86_64', 'ppc']}
+
+
+def sanitize(info):
+ """Do some sanitization of input values, primarily
+ to handle universal Mac builds."""
+ if "processor" in info and info["processor"] == "universal-x86-x86_64":
+ # If we're running on OS X 10.6 or newer, assume 64-bit
+ if release[:4] >= "10.6": # Note this is a string comparison
+ info["processor"] = "x86_64"
+ info["bits"] = 64
+ else:
+ info["processor"] = "x86"
+ info["bits"] = 32
+
+# method for updating information
+def update(new_info):
+ """
+ Update the info.
+
+ :param new_info: Either a dict containing the new info or a path/url
+ to a json file containing the new info.
+ """
+
+ if isinstance(new_info, basestring):
+ f = mozfile.load(new_info)
+ new_info = json.loads(f.read())
+ f.close()
+
+ info.update(new_info)
+ sanitize(info)
+ globals().update(info)
+
+ # convenience data for os access
+ for os_name in choices['os']:
+ globals()['is' + os_name.title()] = info['os'] == os_name
+ # unix is special
+ if isLinux or isBsd:
+ globals()['isUnix'] = True
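+
+# For example (a hypothetical override), update({'os': 'win', 'bits': 32})
+# refreshes info and the module globals, flipping isWin to True.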
+
+def find_and_update_from_json(*dirs):
+ """
+ Find a mozinfo.json file, load it, and update the info with the
+ contents.
+
+ :param dirs: Directories in which to look for the file. They will be
+ searched after first looking in the root of the objdir
+ if the current script is being run from a Mozilla objdir.
+
+ Returns the full path to mozinfo.json if it was found, or None otherwise.
+ """
+ # First, see if we're in an objdir
+ try:
+ from mozbuild.base import MozbuildObject
+ build = MozbuildObject.from_environment()
+ json_path = _os.path.join(build.topobjdir, "mozinfo.json")
+ if _os.path.isfile(json_path):
+ update(json_path)
+ return json_path
+ except ImportError:
+ pass
+
+ for d in dirs:
+ d = _os.path.abspath(d)
+ json_path = _os.path.join(d, "mozinfo.json")
+ if _os.path.isfile(json_path):
+ update(json_path)
+ return json_path
+
+ return None
+
+update({})
+
+# exports
+__all__ = info.keys()
+__all__ += ['is' + os_name.title() for os_name in choices['os']]
+__all__ += [
+ 'info',
+ 'unknown',
+ 'main',
+ 'choices',
+ 'update',
+ 'find_and_update_from_json',
+ ]
+
+def main(args=None):
+
+ # parse the command line
+ from optparse import OptionParser
+ parser = OptionParser(description=__doc__)
+ for key in choices:
+ parser.add_option('--%s' % key, dest=key,
+ action='store_true', default=False,
+ help="display choices for %s" % key)
+ options, args = parser.parse_args(args)
+
+ # args are JSON blobs to override info
+ if args:
+ for arg in args:
+ if _os.path.exists(arg):
+ string = file(arg).read()
+ else:
+ string = arg
+ update(json.loads(string))
+
+ # print out choices if requested
+ flag = False
+ for key, value in options.__dict__.items():
+ if value is True:
+ print '%s choices: %s' % (key, ' '.join([str(choice)
+ for choice in choices[key]]))
+ flag = True
+ if flag: return
+
+ # otherwise, print out all info
+ for key, value in info.items():
+ print '%s: %s' % (key, value)
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/mozharness/mozprocess/__init__.py b/testing/mozharness/mozprocess/__init__.py
new file mode 100644
index 000000000..6f4ae4945
--- /dev/null
+++ b/testing/mozharness/mozprocess/__init__.py
@@ -0,0 +1,5 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from processhandler import *
diff --git a/testing/mozharness/mozprocess/pid.py b/testing/mozharness/mozprocess/pid.py
new file mode 100755
index 000000000..d1f0d9336
--- /dev/null
+++ b/testing/mozharness/mozprocess/pid.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import mozinfo
+import shlex
+import subprocess
+import sys
+
+# determine the platform-specific invocation of `ps`
+if mozinfo.isMac:
+ psarg = '-Acj'
+elif mozinfo.isLinux:
+ psarg = 'axwww'
+else:
+ psarg = 'ax'
+
+def ps(arg=psarg):
+ """
+ python front-end to `ps`
+ http://en.wikipedia.org/wiki/Ps_%28Unix%29
+ returns a list of process dicts based on the `ps` header
+ """
+ retval = []
+ process = subprocess.Popen(['ps', arg], stdout=subprocess.PIPE)
+ stdout, _ = process.communicate()
+ header = None
+ for line in stdout.splitlines():
+ line = line.strip()
+ if header is None:
+ # first line is the header
+ header = line.split()
+ continue
+ split = line.split(None, len(header)-1)
+ process_dict = dict(zip(header, split))
+ retval.append(process_dict)
+ return retval
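+
+# Each returned dict mirrors the local `ps` header, e.g. (illustrative):
+# {'PID': '1', 'TTY': '?', 'COMMAND': '/sbin/init'}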
+
+def running_processes(name, psarg=psarg, defunct=True):
+ """
+ returns a list of
+ {'PID': PID of process (int)
+ 'command': command line of process (list)}
+ with the executable named `name`.
+ - defunct: whether to return defunct processes
+ """
+ retval = []
+ for process in ps(psarg):
+ # Support for both BSD and UNIX syntax
+ # `ps aux` returns COMMAND, `ps -ef` returns CMD
+ try:
+ command = process['COMMAND']
+ except KeyError:
+ command = process['CMD']
+
+ command = shlex.split(command)
+ if command[-1] == '<defunct>':
+ command = command[:-1]
+ if not command or not defunct:
+ continue
+ if 'STAT' in process and not defunct:
+ if process['STAT'] == 'Z+':
+ continue
+ prog = command[0]
+ basename = os.path.basename(prog)
+ if basename == name:
+ retval.append((int(process['PID']), command))
+ return retval
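+
+# e.g. running_processes('firefox') might return (values illustrative):
+# [(1234, ['/usr/bin/firefox', '-P', 'default'])]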
+
+def get_pids(name):
+ """Get all the pids matching name"""
+
+ if mozinfo.isWin:
+ # use the windows-specific implementation
+ import wpk
+ return wpk.get_pids(name)
+ else:
+ return [pid for pid,_ in running_processes(name)]
+
+if __name__ == '__main__':
+ pids = set()
+ for i in sys.argv[1:]:
+ pids.update(get_pids(i))
+ for i in sorted(pids):
+ print i
diff --git a/testing/mozharness/mozprocess/processhandler.py b/testing/mozharness/mozprocess/processhandler.py
new file mode 100644
index 000000000..b89e17eb0
--- /dev/null
+++ b/testing/mozharness/mozprocess/processhandler.py
@@ -0,0 +1,921 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import select
+import signal
+import subprocess
+import sys
+import threading
+import time
+import traceback
+from Queue import Queue
+from datetime import datetime, timedelta
+__all__ = ['ProcessHandlerMixin', 'ProcessHandler']
+
+# Set the MOZPROCESS_DEBUG environment variable to 1 to see some debugging output
+MOZPROCESS_DEBUG = os.getenv("MOZPROCESS_DEBUG")
+
+# We dont use mozinfo because it is expensive to import, see bug 933558.
+isWin = os.name == "nt"
+isPosix = os.name == "posix" # includes MacOS X
+
+if isWin:
+ import ctypes, ctypes.wintypes, msvcrt
+ from ctypes import sizeof, addressof, c_ulong, byref, POINTER, WinError, c_longlong
+ import winprocess
+ from qijo import JobObjectAssociateCompletionPortInformation,\
+ JOBOBJECT_ASSOCIATE_COMPLETION_PORT, JobObjectExtendedLimitInformation,\
+ JOBOBJECT_BASIC_LIMIT_INFORMATION, JOBOBJECT_EXTENDED_LIMIT_INFORMATION, IO_COUNTERS
+
+class ProcessHandlerMixin(object):
+ """
+ A class for launching and manipulating local processes.
+
+ :param cmd: command to run. May be a string or a list. If specified as a list, the first element will be interpreted as the command, and all additional elements will be interpreted as arguments to that command.
+ :param args: list of arguments to pass to the command (defaults to None). Must not be set when `cmd` is specified as a list.
+ :param cwd: working directory for command (defaults to None).
+ :param env: is the environment to use for the process (defaults to os.environ).
+ :param ignore_children: causes system to ignore child processes when True, defaults to False (which tracks child processes).
+ :param kill_on_timeout: when True, the process will be killed when a timeout is reached. When False, the caller is responsible for killing the process. Failure to do so could cause a call to wait() to hang indefinitely. (Defaults to True.)
+ :param processOutputLine: function to be called for each line of output produced by the process (defaults to None).
+ :param onTimeout: function to be called when the process times out.
+ :param onFinish: function to be called when the process terminates normally without timing out.
+ :param kwargs: additional keyword args to pass directly into Popen.
+
+ NOTE: Child processes will be tracked by default. If for any reason
+ we are unable to track child processes and ignore_children is set to False,
+ then we will fall back to only tracking the root process. The fallback
+ will be logged.
+ """
+
+ class Process(subprocess.Popen):
+ """
+ Represents our view of a subprocess.
+ It adds a kill() method which allows it to be stopped explicitly.
+ """
+
+ MAX_IOCOMPLETION_PORT_NOTIFICATION_DELAY = 180
+ MAX_PROCESS_KILL_DELAY = 30
+
+ def __init__(self,
+ args,
+ bufsize=0,
+ executable=None,
+ stdin=None,
+ stdout=None,
+ stderr=None,
+ preexec_fn=None,
+ close_fds=False,
+ shell=False,
+ cwd=None,
+ env=None,
+ universal_newlines=False,
+ startupinfo=None,
+ creationflags=0,
+ ignore_children=False):
+
+ # Parameter for whether or not we should attempt to track child processes
+ self._ignore_children = ignore_children
+
+ if not self._ignore_children and not isWin:
+ # Set the process group id for linux systems
+ # Sets process group id to the pid of the parent process
+ # NOTE: This prevents you from using preexec_fn and managing
+ # child processes, TODO: Ideally, find a way around this
+ def setpgidfn():
+ os.setpgid(0, 0)
+ preexec_fn = setpgidfn
+
+ try:
+ subprocess.Popen.__init__(self, args, bufsize, executable,
+ stdin, stdout, stderr,
+ preexec_fn, close_fds,
+ shell, cwd, env,
+ universal_newlines, startupinfo, creationflags)
+ except OSError, e:
+ print >> sys.stderr, args
+ raise
+
+ def __del__(self, _maxint=sys.maxint):
+ if isWin:
+ if self._handle:
+ if hasattr(self, '_internal_poll'):
+ self._internal_poll(_deadstate=_maxint)
+ else:
+ self.poll(_deadstate=sys.maxint)
+ if self._handle or self._job or self._io_port:
+ self._cleanup()
+ else:
+ subprocess.Popen.__del__(self)
+
+ def kill(self, sig=None):
+ self.returncode = 0
+ if isWin:
+ if not self._ignore_children and self._handle and self._job:
+ winprocess.TerminateJobObject(self._job, winprocess.ERROR_CONTROL_C_EXIT)
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ elif self._handle:
+ err = None
+ try:
+ winprocess.TerminateProcess(self._handle, winprocess.ERROR_CONTROL_C_EXIT)
+ except:
+ err = "Could not terminate process"
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ self._cleanup()
+ if err is not None:
+ raise OSError(err)
+ else:
+ sig = sig or signal.SIGKILL
+ if not self._ignore_children:
+ try:
+ os.killpg(self.pid, sig)
+ except BaseException, e:
+ if getattr(e, "errno", None) != 3:
+ # Error 3 is "no such process", which is ok
+ print >> sys.stdout, "Could not kill process, could not find pid: %s, assuming it's already dead" % self.pid
+ else:
+ os.kill(self.pid, sig)
+ self.returncode = -sig
+
+ self._cleanup()
+ return self.returncode
+
+ def wait(self):
+ """ Popen.wait
+ Called to wait for a running process to shut down and return
+ its exit code
+ Returns the main process's exit code
+ """
+ # This call will be different for each OS
+ self.returncode = self._wait()
+ self._cleanup()
+ return self.returncode
+
+ """ Private Members of Process class """
+
+ if isWin:
+ # Redefine the execute child so that we can track process groups
+ def _execute_child(self, *args_tuple):
+ # workaround for bug 950894
+ if sys.hexversion < 0x02070600: # prior to 2.7.6
+ (args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines, startupinfo,
+ creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite) = args_tuple
+ to_close = set()
+ else: # 2.7.6 and later
+ (args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines, startupinfo,
+ creationflags, shell, to_close,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite) = args_tuple
+ if not isinstance(args, basestring):
+ args = subprocess.list2cmdline(args)
+
+ # Always or in the create new process group
+ creationflags |= winprocess.CREATE_NEW_PROCESS_GROUP
+
+ if startupinfo is None:
+ startupinfo = winprocess.STARTUPINFO()
+
+ if None not in (p2cread, c2pwrite, errwrite):
+ startupinfo.dwFlags |= winprocess.STARTF_USESTDHANDLES
+ startupinfo.hStdInput = int(p2cread)
+ startupinfo.hStdOutput = int(c2pwrite)
+ startupinfo.hStdError = int(errwrite)
+ if shell:
+ startupinfo.dwFlags |= winprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = winprocess.SW_HIDE
+ comspec = os.environ.get("COMSPEC", "cmd.exe")
+ args = comspec + " /c " + args
+
+ # determine if we can create a job
+ canCreateJob = winprocess.CanCreateJobObject()
+
+ # Ensure we write a warning message if we are falling back
+ if not canCreateJob and not self._ignore_children:
+ # We can't create job objects AND the user wanted us to
+ # Warn the user about this.
+ print >> sys.stderr, "ProcessManager UNABLE to use job objects to manage child processes"
+
+ # set process creation flags
+ creationflags |= winprocess.CREATE_SUSPENDED
+ creationflags |= winprocess.CREATE_UNICODE_ENVIRONMENT
+ if canCreateJob:
+ creationflags |= winprocess.CREATE_BREAKAWAY_FROM_JOB
+ else:
+ # Since we've warned, we just log info here to inform you
+ # of the consequence of setting ignore_children = True
+ print "ProcessManager NOT managing child processes"
+
+ # create the process
+ hp, ht, pid, tid = winprocess.CreateProcess(
+ executable, args,
+ None, None, # No special security
+ 1, # Must inherit handles!
+ creationflags,
+ winprocess.EnvironmentBlock(env),
+ cwd, startupinfo)
+ self._child_created = True
+ self._handle = hp
+ self._thread = ht
+ self.pid = pid
+ self.tid = tid
+
+ if not self._ignore_children and canCreateJob:
+ try:
+ # We create a new job for this process, so that we can kill
+ # the process and any sub-processes
+ # Create the IO Completion Port
+ self._io_port = winprocess.CreateIoCompletionPort()
+ self._job = winprocess.CreateJobObject()
+
+ # Now associate the io comp port and the job object
+ joacp = JOBOBJECT_ASSOCIATE_COMPLETION_PORT(winprocess.COMPKEY_JOBOBJECT,
+ self._io_port)
+ winprocess.SetInformationJobObject(self._job,
+ JobObjectAssociateCompletionPortInformation,
+ addressof(joacp),
+ sizeof(joacp)
+ )
+
+ # Allow subprocesses to break away from us - necessary for
+ # flash with protected mode
+ jbli = JOBOBJECT_BASIC_LIMIT_INFORMATION(
+ c_longlong(0), # per process time limit (ignored)
+ c_longlong(0), # per job user time limit (ignored)
+ winprocess.JOB_OBJECT_LIMIT_BREAKAWAY_OK,
+ 0, # min working set (ignored)
+ 0, # max working set (ignored)
+ 0, # active process limit (ignored)
+ None, # affinity (ignored)
+ 0, # Priority class (ignored)
+ 0, # Scheduling class (ignored)
+ )
+
+ iocntr = IO_COUNTERS()
+ jeli = JOBOBJECT_EXTENDED_LIMIT_INFORMATION(
+ jbli, # basic limit info struct
+ iocntr, # io_counters (ignored)
+ 0, # process mem limit (ignored)
+ 0, # job mem limit (ignored)
+ 0, # peak process limit (ignored)
+ 0) # peak job limit (ignored)
+
+ winprocess.SetInformationJobObject(self._job,
+ JobObjectExtendedLimitInformation,
+ addressof(jeli),
+ sizeof(jeli)
+ )
+
+ # Assign the job object to the process
+ winprocess.AssignProcessToJobObject(self._job, int(hp))
+
+ # It's overkill, but we use Queue to signal between threads
+ # because it handles errors more gracefully than event or condition.
+ self._process_events = Queue()
+
+ # Spin up our thread for managing the IO Completion Port
+ self._procmgrthread = threading.Thread(target = self._procmgr)
+ except:
+ print >> sys.stderr, """Exception trying to use job objects;
+falling back to not using job objects for managing child processes"""
+ tb = traceback.format_exc()
+ print >> sys.stderr, tb
+ # Ensure no dangling handles left behind
+ self._cleanup_job_io_port()
+ else:
+ self._job = None
+
+ winprocess.ResumeThread(int(ht))
+ if getattr(self, '_procmgrthread', None):
+ self._procmgrthread.start()
+ ht.Close()
+
+ for i in (p2cread, c2pwrite, errwrite):
+ if i is not None:
+ i.Close()
+
+ # Windows Process Manager - watches the IO Completion Port and
+ # keeps track of child processes
+ def _procmgr(self):
+ if not (self._io_port) or not (self._job):
+ return
+
+ try:
+ self._poll_iocompletion_port()
+ except KeyboardInterrupt:
+ raise KeyboardInterrupt
+
+ def _poll_iocompletion_port(self):
+ # Watch the IO Completion port for status
+ self._spawned_procs = {}
+ countdowntokill = 0
+
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC Self.pid value is: %s" % self.pid
+
+ while True:
+ msgid = c_ulong(0)
+ compkey = c_ulong(0)
+ pid = c_ulong(0)
+ portstatus = winprocess.GetQueuedCompletionStatus(self._io_port,
+ byref(msgid),
+ byref(compkey),
+ byref(pid),
+ 5000)
+
+ # If the countdowntokill has been activated, we need to check
+ # if we should start killing the children or not.
+ if countdowntokill != 0:
+ diff = datetime.now() - countdowntokill
+ # Arbitrarily wait 3 minutes for windows to get its act together
+ # Windows sometimes takes a small nap between notifying the
+ # IO Completion port and actually killing the children, and we
+ # don't want to mistake that situation for the situation of an unexpected
+ # parent abort (which is what we're looking for here).
+ if diff.seconds > self.MAX_IOCOMPLETION_PORT_NOTIFICATION_DELAY:
+ print >> sys.stderr, "Parent process %s exited with children alive:" % self.pid
+ print >> sys.stderr, "PIDS: %s" % ', '.join([str(i) for i in self._spawned_procs])
+ print >> sys.stderr, "Attempting to kill them..."
+ self.kill()
+ self._process_events.put({self.pid: 'FINISHED'})
+
+ if not portstatus:
+ # Check to see what happened
+ errcode = winprocess.GetLastError()
+ if errcode == winprocess.ERROR_ABANDONED_WAIT_0:
+ # Then something has killed the port, break the loop
+ print >> sys.stderr, "IO Completion Port unexpectedly closed"
+ break
+ elif errcode == winprocess.WAIT_TIMEOUT:
+ # Timeouts are expected, just keep on polling
+ continue
+ else:
+ print >> sys.stderr, "Error Code %s trying to query IO Completion Port, exiting" % errcode
+ raise WinError(errcode)
+ break
+
+ if compkey.value == winprocess.COMPKEY_TERMINATE.value:
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC compkeyterminate detected"
+ # Then we're done
+ break
+
+ # Check the status of the IO Port and do things based on it
+ if compkey.value == winprocess.COMPKEY_JOBOBJECT.value:
+ if msgid.value == winprocess.JOB_OBJECT_MSG_ACTIVE_PROCESS_ZERO:
+ # No processes left, time to shut down
+ # Signal anyone waiting on us that it is safe to shut down
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC job object msg active processes zero"
+ self._process_events.put({self.pid: 'FINISHED'})
+ break
+ elif msgid.value == winprocess.JOB_OBJECT_MSG_NEW_PROCESS:
+ # New Process started
+ # Add the child proc to our list in case our parent flakes out on us
+ # without killing everything.
+ if pid.value != self.pid:
+ self._spawned_procs[pid.value] = 1
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC new process detected with pid value: %s" % pid.value
+ elif msgid.value == winprocess.JOB_OBJECT_MSG_EXIT_PROCESS:
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC process id %s exited normally" % pid.value
+ # One process exited normally
+ if pid.value == self.pid and len(self._spawned_procs) > 0:
+ # Parent process dying, start countdown timer
+ countdowntokill = datetime.now()
+ elif pid.value in self._spawned_procs:
+ # Child Process died remove from list
+ del(self._spawned_procs[pid.value])
+ elif msgid.value == winprocess.JOB_OBJECT_MSG_ABNORMAL_EXIT_PROCESS:
+ # One process exited abnormally
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC process id %s exited abnormally" % pid.value
+ if pid.value == self.pid and len(self._spawned_procs) > 0:
+ # Parent process dying, start countdown timer
+ countdowntokill = datetime.now()
+ elif pid.value in self._spawned_procs:
+ # Child Process died remove from list
+ del self._spawned_procs[pid.value]
+ else:
+ # We don't care about anything else
+ if MOZPROCESS_DEBUG:
+ print "DBG::MOZPROC We got a message %s" % msgid.value
+ pass
+
+ def _wait(self):
+
+ # First, check to see if the process is still running
+ if self._handle:
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ else:
+ # Dude, the process is like totally dead!
+ return self.returncode
+
+ # Python 2.5 uses isAlive versus is_alive; use the proper one
+ threadalive = False
+ if hasattr(self, "_procmgrthread"):
+ if hasattr(self._procmgrthread, 'is_alive'):
+ threadalive = self._procmgrthread.is_alive()
+ else:
+ threadalive = self._procmgrthread.isAlive()
+ if self._job and threadalive:
+ # Then we are managing with IO Completion Ports
+ # wait on a signal so we know when we have seen the last
+ # process come through.
+ # We use queues to synchronize between the thread and this
+ # function because events just didn't have robust enough error
+ # handling on pre-2.7 versions
+ err = None
+ try:
+ # timeout is the max amount of time the procmgr thread will wait for
+ # child processes to shutdown before killing them with extreme prejudice.
+ item = self._process_events.get(timeout=self.MAX_IOCOMPLETION_PORT_NOTIFICATION_DELAY +
+ self.MAX_PROCESS_KILL_DELAY)
+ if item[self.pid] == 'FINISHED':
+ self._process_events.task_done()
+ except:
+ err = "IO Completion Port failed to signal process shutdown"
+ # Either way, let's try to get this code
+ if self._handle:
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ self._cleanup()
+
+ if err is not None:
+ raise OSError(err)
+
+
+ else:
+ # Not managing with job objects, so all we can reasonably do
+ # is call waitforsingleobject and hope for the best
+
+ if MOZPROCESS_DEBUG and not self._ignore_children:
+ print "DBG::MOZPROC NOT USING JOB OBJECTS!!!"
+ # First, make sure we have not already ended
+ if self.returncode != winprocess.STILL_ACTIVE:
+ self._cleanup()
+ return self.returncode
+
+ rc = None
+ if self._handle:
+ rc = winprocess.WaitForSingleObject(self._handle, -1)
+
+ if rc == winprocess.WAIT_TIMEOUT:
+ # The process isn't dead, so kill it
+ print "Timed out waiting for process to close, attempting TerminateProcess"
+ self.kill()
+ elif rc == winprocess.WAIT_OBJECT_0:
+ # We caught WAIT_OBJECT_0, which indicates all is well
+ print "Single process terminated successfully"
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ else:
+ # An error occurred, we should probably throw
+ rc = winprocess.GetLastError()
+ if rc:
+ raise WinError(rc)
+
+ self._cleanup()
+
+ return self.returncode
+
+ def _cleanup_job_io_port(self):
+ """ Do the job and IO port cleanup separately because there are
+ cases where we want to clean these without killing _handle
+ (i.e. if we fail to create the job object in the first place)
+ """
+ if getattr(self, '_job') and self._job != winprocess.INVALID_HANDLE_VALUE:
+ self._job.Close()
+ self._job = None
+ else:
+ # If windows already freed our handle just set it to none
+ # (saw this intermittently while testing)
+ self._job = None
+
+ if getattr(self, '_io_port', None) and self._io_port != winprocess.INVALID_HANDLE_VALUE:
+ self._io_port.Close()
+ self._io_port = None
+ else:
+ self._io_port = None
+
+ if getattr(self, '_procmgrthread', None):
+ self._procmgrthread = None
+
+ def _cleanup(self):
+ self._cleanup_job_io_port()
+ if self._thread and self._thread != winprocess.INVALID_HANDLE_VALUE:
+ self._thread.Close()
+ self._thread = None
+ else:
+ self._thread = None
+
+ if self._handle and self._handle != winprocess.INVALID_HANDLE_VALUE:
+ self._handle.Close()
+ self._handle = None
+ else:
+ self._handle = None
+
+ elif isPosix:
+
+ def _wait(self):
+ """ Haven't found any reason to differentiate between these platforms
+ so they all use the same wait callback. If it is necessary to
+ craft different styles of wait, then a new _wait method
+ could be easily implemented.
+ """
+
+ if not self._ignore_children:
+ try:
+ # os.waitpid return value:
+ # > [...] a tuple containing its pid and exit status
+ # > indication: a 16-bit number, whose low byte is the
+ # > signal number that killed the process, and whose
+ # > high byte is the exit status (if the signal number
+ # > is zero)
+ # - http://docs.python.org/2/library/os.html#os.wait
+ status = os.waitpid(self.pid, 0)[1]
+
+ # For consistency, format status the same as subprocess'
+ # returncode attribute
+ if status > 255:
+ return status >> 8
+ return -status
+ except OSError, e:
+ if getattr(e, "errno", None) != 10:
+ # Error 10 is "no child process", which could indicate normal
+ # close
+ print >> sys.stderr, "Encountered error waiting for pid to close: %s" % e
+ raise
+ return 0
+
+ else:
+ # For non-group wait, call base class
+ subprocess.Popen.wait(self)
+ return self.returncode
+
+ def _cleanup(self):
+ pass
+
+ else:
+ # An unrecognized platform, we will call the base class for everything
+ print >> sys.stderr, "Unrecognized platform, process groups may not be managed properly"
+
+ def _wait(self):
+ self.returncode = subprocess.Popen.wait(self)
+ return self.returncode
+
+ def _cleanup(self):
+ pass
+
+ def __init__(self,
+ cmd,
+ args=None,
+ cwd=None,
+ env=None,
+ ignore_children = False,
+ kill_on_timeout = True,
+ processOutputLine=(),
+ onTimeout=(),
+ onFinish=(),
+ **kwargs):
+ self.cmd = cmd
+ self.args = args
+ self.cwd = cwd
+ self.didTimeout = False
+ self._ignore_children = ignore_children
+ self._kill_on_timeout = kill_on_timeout
+ self.keywordargs = kwargs
+ self.outThread = None
+ self.read_buffer = ''
+
+ if env is None:
+ env = os.environ.copy()
+ self.env = env
+
+ # handlers
+ self.processOutputLineHandlers = list(processOutputLine)
+ self.onTimeoutHandlers = list(onTimeout)
+ self.onFinishHandlers = list(onFinish)
+
+ # It is common for people to pass in the entire array with the cmd and
+ # the args together since this is how Popen uses it. Allow for that.
+ if isinstance(self.cmd, list):
+ if self.args != None:
+ raise TypeError("cmd and args must not both be lists")
+ (self.cmd, self.args) = (self.cmd[0], self.cmd[1:])
+ elif self.args is None:
+ self.args = []
+
+ @property
+ def timedOut(self):
+ """True if the process has timed out."""
+ return self.didTimeout
+
+ @property
+ def commandline(self):
+ """the string value of the command line (command + args)"""
+ return subprocess.list2cmdline([self.cmd] + self.args)
+
+ def run(self, timeout=None, outputTimeout=None):
+ """
+ Starts the process.
+
+ If timeout is not None, the process will be allowed to continue for
+ that number of seconds before being killed. If the process is killed
+ due to a timeout, the onTimeout handler will be called.
+
+ If outputTimeout is not None, the process will be allowed to continue
+ for that number of seconds without producing any output before
+ being killed.
+ """
+ self.didTimeout = False
+ self.startTime = datetime.now()
+
+ # default arguments
+ args = dict(stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ cwd=self.cwd,
+ env=self.env,
+ ignore_children=self._ignore_children)
+
+ # build process arguments
+ args.update(self.keywordargs)
+
+ # launch the process
+ self.proc = self.Process([self.cmd] + self.args, **args)
+
+ self.processOutput(timeout=timeout, outputTimeout=outputTimeout)
+
+ def kill(self, sig=None):
+ """
+ Kills the managed process.
+
+ If you created the process with 'ignore_children=False' (the
+ default) then it will also kill all child processes spawned by
+ it. If you specified 'ignore_children=True' when creating the
+ process, only the root process will be killed.
+
+ Note that this does not manage any state or save any output;
+ it immediately kills the process.
+
+ :param sig: Signal used to kill the process, defaults to SIGKILL
+ (has no effect on Windows)
+ """
+ try:
+ return self.proc.kill(sig=sig)
+ except AttributeError:
+ # Try to print a relevant error message.
+ if not self.proc:
+ print >> sys.stderr, "Unable to kill Process because call to ProcessHandler constructor failed."
+ else:
+ raise
+
+ def readWithTimeout(self, f, timeout):
+ """
+ Try to read a line of output from the file object *f*.
+
+ *f* must be a pipe, like the *stdout* member of a subprocess.Popen
+ object created with stdout=PIPE. If no output
+ is received within *timeout* seconds, return a blank line.
+
+ Returns a tuple (line, did_timeout), where *did_timeout* is True
+ if the read timed out, and False otherwise.
+ """
+ # Calls a private member because this is a different function based on
+ # the OS
+ return self._readWithTimeout(f, timeout)
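+
+ # Sketch: (lines, timed_out) = self.readWithTimeout(f, 5) returns
+ # ('', True) if no output arrived within 5 seconds.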
+
+ def processOutputLine(self, line):
+ """Called for each line of output that a process sends to stdout/stderr."""
+ for handler in self.processOutputLineHandlers:
+ handler(line)
+
+ def onTimeout(self):
+ """Called when a process times out."""
+ for handler in self.onTimeoutHandlers:
+ handler()
+
+ def onFinish(self):
+ """Called when a process finishes without a timeout."""
+ for handler in self.onFinishHandlers:
+ handler()
+
+ def processOutput(self, timeout=None, outputTimeout=None):
+ """
+ Handle process output until the process terminates or times out.
+
+ If timeout is not None, the process will be allowed to continue for
+ that number of seconds before being killed.
+
+ If outputTimeout is not None, the process will be allowed to continue
+ for that number of seconds without producing any output before
+ being killed.
+ """
+ def _processOutput():
+ self.didTimeout = False
+ logsource = self.proc.stdout
+
+ lineReadTimeout = None
+ if timeout:
+ lineReadTimeout = timeout - (datetime.now() - self.startTime).seconds
+ elif outputTimeout:
+ lineReadTimeout = outputTimeout
+
+ (lines, self.didTimeout) = self.readWithTimeout(logsource, lineReadTimeout)
+ while lines != "":
+ for line in lines.splitlines():
+ self.processOutputLine(line.rstrip())
+
+ if self.didTimeout:
+ break
+
+ if timeout:
+ lineReadTimeout = timeout - (datetime.now() - self.startTime).seconds
+ (lines, self.didTimeout) = self.readWithTimeout(logsource, lineReadTimeout)
+
+ if self.didTimeout:
+ if self._kill_on_timeout:
+ self.proc.kill()
+ self.onTimeout()
+ else:
+ self.onFinish()
+
+ if not hasattr(self, 'proc'):
+ self.run()
+
+ if not self.outThread:
+ self.outThread = threading.Thread(target=_processOutput)
+ self.outThread.daemon = True
+ self.outThread.start()
+
+
+ def wait(self, timeout=None):
+ """
+ Waits until all output has been read and the process is
+ terminated.
+
+ If timeout is not None, will return after timeout seconds.
+ This timeout only causes the wait function to return and
+ does not kill the process.
+
+ Returns the process' exit code. A None value indicates the
+ process hasn't terminated yet. A negative value -N indicates
+ the process was killed by signal N (Unix only).
+ """
+ if self.outThread:
+ # Thread.join() blocks the main thread until outThread is finished
+ # wake up once a second in case a keyboard interrupt is sent
+ count = 0
+ while self.outThread.isAlive():
+ self.outThread.join(timeout=1)
+ count += 1
+ if timeout and count > timeout:
+ return None
+
+ return self.proc.wait()
+
+ # TODO Remove this method when consumers have been fixed
+ def waitForFinish(self, timeout=None):
+ print >> sys.stderr, "MOZPROCESS WARNING: ProcessHandler.waitForFinish() is deprecated, " \
+ "use ProcessHandler.wait() instead"
+ return self.wait(timeout=timeout)
+
+
+ ### Private methods from here on down. Thar be dragons.
+
+ if isWin:
+ # Windows Specific private functions are defined in this block
+ PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
+ GetLastError = ctypes.windll.kernel32.GetLastError
+
+        def _readWithTimeout(self, f, timeout):
+            if timeout is None:
+                # shortcut to allow callers to pass in "None" for no timeout.
+                return (f.readline(), False)
+            x = msvcrt.get_osfhandle(f.fileno())
+            avail = ctypes.c_long()
+            done = time.time() + timeout
+            while time.time() < done:
+                if self.PeekNamedPipe(x, None, 0, None, ctypes.byref(avail), None) == 0:
+                    err = self.GetLastError()
+                    if err == 38 or err == 109:  # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
+                        return ('', False)
+                    else:
+                        raise OSError("readWithTimeout got error: %d" % err)
+                if avail.value > 0:
+                    # we're assuming that the output is line-buffered,
+                    # which is not unreasonable
+                    return (f.readline(), False)
+                time.sleep(0.01)
+            return ('', True)
+
+ else:
+ # Generic
+ def _readWithTimeout(self, f, timeout):
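+            # Strategy: buffer raw reads in self.read_buffer, return complete
+            # newline-joined lines as soon as they are available, flush any
+            # leftover buffer at EOF, and report ('', True) on a timeout.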
+ while True:
+ try:
+ (r, w, e) = select.select([f], [], [], timeout)
+ except:
+ # return a blank line
+ return ('', True)
+
+ if len(r) == 0:
+ return ('', True)
+
+ output = os.read(f.fileno(), 4096)
+ if not output:
+ output = self.read_buffer
+ self.read_buffer = ''
+ return (output, False)
+ self.read_buffer += output
+ if '\n' not in self.read_buffer:
+ time.sleep(0.01)
+ continue
+ tmp = self.read_buffer.split('\n')
+ lines, self.read_buffer = tmp[:-1], tmp[-1]
+ real_lines = [x for x in lines if x != '']
+ if not real_lines:
+ time.sleep(0.01)
+ continue
+ break
+ return ('\n'.join(lines), False)
+
+ @property
+ def pid(self):
+ return self.proc.pid
+
+
+### default output handlers
+### these should be callables that take the output line
+
+def print_output(line):
+ print line
+
+class StoreOutput(object):
+ """accumulate stdout"""
+
+ def __init__(self):
+ self.output = []
+
+ def __call__(self, line):
+ self.output.append(line)
+
+class LogOutput(object):
+ """pass output to a file"""
+
+ def __init__(self, filename):
+ self.filename = filename
+ self.file = None
+
+ def __call__(self, line):
+ if self.file is None:
+            self.file = open(self.filename, 'a')
+ self.file.write(line + '\n')
+ self.file.flush()
+
+ def __del__(self):
+ if self.file is not None:
+ self.file.close()
+
+### front end class with the default handlers
+
+class ProcessHandler(ProcessHandlerMixin):
+ """
+ Convenience class for handling processes with default output handlers.
+
+ If no processOutputLine keyword argument is specified, write all
+ output to stdout. Otherwise, the function specified by this argument
+ will be called for each line of output; the output will not be written
+ to stdout automatically.
+
+ If storeOutput==True, the output produced by the process will be saved
+ as self.output.
+
+ If logfile is not None, the output produced by the process will be
+ appended to the given file.
+ """
+
+ def __init__(self, cmd, logfile=None, storeOutput=True, **kwargs):
+ kwargs.setdefault('processOutputLine', [])
+
+ # Print to standard output only if no outputline provided
+ if not kwargs['processOutputLine']:
+ kwargs['processOutputLine'].append(print_output)
+
+ if logfile:
+ logoutput = LogOutput(logfile)
+ kwargs['processOutputLine'].append(logoutput)
+
+ self.output = None
+ if storeOutput:
+ storeoutput = StoreOutput()
+ self.output = storeoutput.output
+ kwargs['processOutputLine'].append(storeoutput)
+
+ ProcessHandlerMixin.__init__(self, cmd, **kwargs)
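+
+
+# A minimal usage sketch (added for illustration; the command and timeout
+# below are assumptions, not part of the module):
+def _example_run_and_collect(cmd=('echo', 'hello')):
+    """Run cmd through ProcessHandler with the default handlers and return
+    (exit_code, collected_lines)."""
+    p = ProcessHandler(list(cmd))
+    p.run()
+    p.processOutput(timeout=60)
+    status = p.wait()
+    return status, p.output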
diff --git a/testing/mozharness/mozprocess/qijo.py b/testing/mozharness/mozprocess/qijo.py
new file mode 100644
index 000000000..1ac88430c
--- /dev/null
+++ b/testing/mozharness/mozprocess/qijo.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from ctypes import c_void_p, POINTER, sizeof, Structure, windll, WinError, WINFUNCTYPE, addressof, c_size_t, c_ulong
+from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LARGE_INTEGER
+
+LPVOID = c_void_p
+LPDWORD = POINTER(DWORD)
+SIZE_T = c_size_t
+ULONG_PTR = POINTER(c_ulong)
+
+# A ULONGLONG is a 64-bit unsigned integer.
+# Thus there are 8 bytes in a ULONGLONG.
+# XXX why not import c_ulonglong?
+ULONGLONG = BYTE * 8
+
+class IO_COUNTERS(Structure):
+ # The IO_COUNTERS struct is 6 ULONGLONGs.
+ # TODO: Replace with non-dummy fields.
+ _fields_ = [('dummy', ULONGLONG * 6)]
+
+class JOBOBJECT_BASIC_ACCOUNTING_INFORMATION(Structure):
+ _fields_ = [('TotalUserTime', LARGE_INTEGER),
+ ('TotalKernelTime', LARGE_INTEGER),
+ ('ThisPeriodTotalUserTime', LARGE_INTEGER),
+ ('ThisPeriodTotalKernelTime', LARGE_INTEGER),
+ ('TotalPageFaultCount', DWORD),
+ ('TotalProcesses', DWORD),
+ ('ActiveProcesses', DWORD),
+ ('TotalTerminatedProcesses', DWORD)]
+
+class JOBOBJECT_BASIC_AND_IO_ACCOUNTING_INFORMATION(Structure):
+ _fields_ = [('BasicInfo', JOBOBJECT_BASIC_ACCOUNTING_INFORMATION),
+ ('IoInfo', IO_COUNTERS)]
+
+# see http://msdn.microsoft.com/en-us/library/ms684147%28VS.85%29.aspx
+class JOBOBJECT_BASIC_LIMIT_INFORMATION(Structure):
+ _fields_ = [('PerProcessUserTimeLimit', LARGE_INTEGER),
+ ('PerJobUserTimeLimit', LARGE_INTEGER),
+ ('LimitFlags', DWORD),
+ ('MinimumWorkingSetSize', SIZE_T),
+ ('MaximumWorkingSetSize', SIZE_T),
+ ('ActiveProcessLimit', DWORD),
+ ('Affinity', ULONG_PTR),
+ ('PriorityClass', DWORD),
+ ('SchedulingClass', DWORD)
+ ]
+
+class JOBOBJECT_ASSOCIATE_COMPLETION_PORT(Structure):
+ _fields_ = [('CompletionKey', c_ulong),
+ ('CompletionPort', HANDLE)]
+
+# see http://msdn.microsoft.com/en-us/library/ms684156%28VS.85%29.aspx
+class JOBOBJECT_EXTENDED_LIMIT_INFORMATION(Structure):
+ _fields_ = [('BasicLimitInformation', JOBOBJECT_BASIC_LIMIT_INFORMATION),
+ ('IoInfo', IO_COUNTERS),
+ ('ProcessMemoryLimit', SIZE_T),
+ ('JobMemoryLimit', SIZE_T),
+ ('PeakProcessMemoryUsed', SIZE_T),
+ ('PeakJobMemoryUsed', SIZE_T)]
+
+# These numbers below come from:
+# http://msdn.microsoft.com/en-us/library/ms686216%28v=vs.85%29.aspx
+JobObjectAssociateCompletionPortInformation = 7
+JobObjectBasicAndIoAccountingInformation = 8
+JobObjectExtendedLimitInformation = 9
+
+class JobObjectInfo(object):
+ mapping = { 'JobObjectBasicAndIoAccountingInformation': 8,
+ 'JobObjectExtendedLimitInformation': 9,
+ 'JobObjectAssociateCompletionPortInformation': 7
+ }
+ structures = {
+ 7: JOBOBJECT_ASSOCIATE_COMPLETION_PORT,
+ 8: JOBOBJECT_BASIC_AND_IO_ACCOUNTING_INFORMATION,
+ 9: JOBOBJECT_EXTENDED_LIMIT_INFORMATION
+ }
+ def __init__(self, _class):
+ if isinstance(_class, basestring):
+ assert _class in self.mapping, 'Class should be one of %s; you gave %s' % (self.mapping, _class)
+ _class = self.mapping[_class]
+ assert _class in self.structures, 'Class should be one of %s; you gave %s' % (self.structures, _class)
+ self.code = _class
+ self.info = self.structures[_class]()
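+
+# Illustrative sketch (added; not part of the original module): both
+# spellings resolve to the same info class, mirroring the MSDN codes above.
+def _example_jobobjectinfo():
+    by_name = JobObjectInfo('JobObjectExtendedLimitInformation')
+    by_code = JobObjectInfo(9)
+    assert by_name.code == by_code.code == 9
+    return by_name.info  # an empty JOBOBJECT_EXTENDED_LIMIT_INFORMATION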
+
+
+QueryInformationJobObjectProto = WINFUNCTYPE(
+ BOOL, # Return type
+ HANDLE, # hJob
+ DWORD, # JobObjectInfoClass
+ LPVOID, # lpJobObjectInfo
+ DWORD, # cbJobObjectInfoLength
+ LPDWORD # lpReturnLength
+ )
+
+QueryInformationJobObjectFlags = (
+ (1, 'hJob'),
+ (1, 'JobObjectInfoClass'),
+ (1, 'lpJobObjectInfo'),
+ (1, 'cbJobObjectInfoLength'),
+ (1, 'lpReturnLength', None)
+ )
+
+_QueryInformationJobObject = QueryInformationJobObjectProto(
+ ('QueryInformationJobObject', windll.kernel32),
+ QueryInformationJobObjectFlags
+ )
+
+class SubscriptableReadOnlyStruct(object):
+ def __init__(self, struct):
+ self._struct = struct
+
+ def _delegate(self, name):
+ result = getattr(self._struct, name)
+ if isinstance(result, Structure):
+ return SubscriptableReadOnlyStruct(result)
+ return result
+
+ def __getitem__(self, name):
+ match = [fname for fname, ftype in self._struct._fields_
+ if fname == name]
+ if match:
+ return self._delegate(name)
+ raise KeyError(name)
+
+ def __getattr__(self, name):
+ return self._delegate(name)
+
+def QueryInformationJobObject(hJob, JobObjectInfoClass):
+ jobinfo = JobObjectInfo(JobObjectInfoClass)
+ result = _QueryInformationJobObject(
+ hJob=hJob,
+ JobObjectInfoClass=jobinfo.code,
+ lpJobObjectInfo=addressof(jobinfo.info),
+ cbJobObjectInfoLength=sizeof(jobinfo.info)
+ )
+ if not result:
+ raise WinError()
+ return SubscriptableReadOnlyStruct(jobinfo.info)
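+
+# Illustrative sketch (added; assumes the caller runs inside a job object):
+# HANDLE(0) queries the job of the calling process, and WinError is raised
+# on failure.
+def _example_current_job_limit_flags():
+    info = QueryInformationJobObject(HANDLE(0),
+                                     'JobObjectExtendedLimitInformation')
+    return info['BasicLimitInformation']['LimitFlags']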
diff --git a/testing/mozharness/mozprocess/winprocess.py b/testing/mozharness/mozprocess/winprocess.py
new file mode 100644
index 000000000..6f3afc8de
--- /dev/null
+++ b/testing/mozharness/mozprocess/winprocess.py
@@ -0,0 +1,457 @@
+# A module to expose various thread/process/job related structures and
+# methods from kernel32
+#
+# The MIT License
+#
+# Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
+#
+# Additions and modifications written by Benjamin Smedberg
+# <benjamin@smedbergs.us> are Copyright (c) 2006 by the Mozilla Foundation
+# <http://www.mozilla.org/>
+#
+# More Modifications
+# Copyright (c) 2006-2007 by Mike Taylor <bear@code-bear.com>
+# Copyright (c) 2007-2008 by Mikeal Rogers <mikeal@mozilla.com>
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of the
+# author not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from ctypes import c_void_p, POINTER, sizeof, Structure, Union, windll, WinError, WINFUNCTYPE, c_ulong
+from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPWSTR, UINT, WORD, ULONG
+from qijo import QueryInformationJobObject
+
+LPVOID = c_void_p
+LPBYTE = POINTER(BYTE)
+LPDWORD = POINTER(DWORD)
+LPBOOL = POINTER(BOOL)
+LPULONG = POINTER(c_ulong)
+
+def ErrCheckBool(result, func, args):
+ """errcheck function for Windows functions that return a BOOL True
+ on success"""
+ if not result:
+ raise WinError()
+ return args
+
+
+# AutoHANDLE
+
+class AutoHANDLE(HANDLE):
+ """Subclass of HANDLE which will call CloseHandle() on deletion."""
+
+ CloseHandleProto = WINFUNCTYPE(BOOL, HANDLE)
+ CloseHandle = CloseHandleProto(("CloseHandle", windll.kernel32))
+ CloseHandle.errcheck = ErrCheckBool
+
+ def Close(self):
+ if self.value and self.value != HANDLE(-1).value:
+ self.CloseHandle(self)
+ self.value = 0
+
+ def __del__(self):
+ self.Close()
+
+ def __int__(self):
+ return self.value
+
+def ErrCheckHandle(result, func, args):
+ """errcheck function for Windows functions that return a HANDLE."""
+ if not result:
+ raise WinError()
+ return AutoHANDLE(result)
+
+# PROCESS_INFORMATION structure
+
+class PROCESS_INFORMATION(Structure):
+ _fields_ = [("hProcess", HANDLE),
+ ("hThread", HANDLE),
+ ("dwProcessID", DWORD),
+ ("dwThreadID", DWORD)]
+
+ def __init__(self):
+ Structure.__init__(self)
+
+ self.cb = sizeof(self)
+
+LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
+
+# STARTUPINFO structure
+
+class STARTUPINFO(Structure):
+ _fields_ = [("cb", DWORD),
+ ("lpReserved", LPWSTR),
+ ("lpDesktop", LPWSTR),
+ ("lpTitle", LPWSTR),
+ ("dwX", DWORD),
+ ("dwY", DWORD),
+ ("dwXSize", DWORD),
+ ("dwYSize", DWORD),
+ ("dwXCountChars", DWORD),
+ ("dwYCountChars", DWORD),
+ ("dwFillAttribute", DWORD),
+ ("dwFlags", DWORD),
+ ("wShowWindow", WORD),
+ ("cbReserved2", WORD),
+ ("lpReserved2", LPBYTE),
+ ("hStdInput", HANDLE),
+ ("hStdOutput", HANDLE),
+ ("hStdError", HANDLE)
+ ]
+LPSTARTUPINFO = POINTER(STARTUPINFO)
+
+SW_HIDE = 0
+
+STARTF_USESHOWWINDOW = 0x01
+STARTF_USESIZE = 0x02
+STARTF_USEPOSITION = 0x04
+STARTF_USECOUNTCHARS = 0x08
+STARTF_USEFILLATTRIBUTE = 0x10
+STARTF_RUNFULLSCREEN = 0x20
+STARTF_FORCEONFEEDBACK = 0x40
+STARTF_FORCEOFFFEEDBACK = 0x80
+STARTF_USESTDHANDLES = 0x100
+
+# EnvironmentBlock
+
+class EnvironmentBlock:
+ """An object which can be passed as the lpEnv parameter of CreateProcess.
+ It is initialized with a dictionary."""
+
+ def __init__(self, dict):
+ if not dict:
+ self._as_parameter_ = None
+ else:
+ values = ["%s=%s" % (key, value)
+ for (key, value) in dict.iteritems()]
+ values.append("")
+ self._as_parameter_ = LPCWSTR("\0".join(values))
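+
+# Illustrative sketch (added): build the double-NUL-terminated unicode block
+# that CreateProcessW expects; pair it with CREATE_UNICODE_ENVIRONMENT
+# (defined further down). The variable names and values are assumptions.
+def _example_env_block():
+    return EnvironmentBlock({'MOZ_EXAMPLE': '1', 'TEMP': 'C:\\Temp'})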
+
+# Error Messages we need to watch for go here
+# See: http://msdn.microsoft.com/en-us/library/ms681388%28v=vs.85%29.aspx
+ERROR_ABANDONED_WAIT_0 = 735
+
+# GetLastError()
+GetLastErrorProto = WINFUNCTYPE(DWORD # Return Type
+ )
+GetLastErrorFlags = ()
+GetLastError = GetLastErrorProto(("GetLastError", windll.kernel32), GetLastErrorFlags)
+
+# CreateProcess()
+
+CreateProcessProto = WINFUNCTYPE(BOOL, # Return type
+ LPCWSTR, # lpApplicationName
+ LPWSTR, # lpCommandLine
+ LPVOID, # lpProcessAttributes
+ LPVOID, # lpThreadAttributes
+ BOOL, # bInheritHandles
+ DWORD, # dwCreationFlags
+ LPVOID, # lpEnvironment
+ LPCWSTR, # lpCurrentDirectory
+ LPSTARTUPINFO, # lpStartupInfo
+ LPPROCESS_INFORMATION # lpProcessInformation
+ )
+
+CreateProcessFlags = ((1, "lpApplicationName", None),
+ (1, "lpCommandLine"),
+ (1, "lpProcessAttributes", None),
+ (1, "lpThreadAttributes", None),
+ (1, "bInheritHandles", True),
+ (1, "dwCreationFlags", 0),
+ (1, "lpEnvironment", None),
+ (1, "lpCurrentDirectory", None),
+ (1, "lpStartupInfo"),
+ (2, "lpProcessInformation"))
+
+def ErrCheckCreateProcess(result, func, args):
+ ErrCheckBool(result, func, args)
+ # return a tuple (hProcess, hThread, dwProcessID, dwThreadID)
+ pi = args[9]
+ return AutoHANDLE(pi.hProcess), AutoHANDLE(pi.hThread), pi.dwProcessID, pi.dwThreadID
+
+CreateProcess = CreateProcessProto(("CreateProcessW", windll.kernel32),
+ CreateProcessFlags)
+CreateProcess.errcheck = ErrCheckCreateProcess
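+
+# Illustrative sketch (added; not part of the module): because of the
+# paramflags above, only lpCommandLine and lpStartupInfo must be supplied,
+# and the errcheck wrapper unpacks PROCESS_INFORMATION into a 4-tuple. The
+# command line shown is an assumption; a production caller should pass a
+# mutable buffer, since CreateProcessW may modify lpCommandLine in place.
+def _example_create_suspended(cmdline=u'notepad.exe'):
+    si = STARTUPINFO()
+    si.cb = sizeof(si)
+    hProcess, hThread, pid, tid = CreateProcess(
+        lpCommandLine=cmdline,
+        dwCreationFlags=CREATE_SUSPENDED,  # defined below in this module
+        lpStartupInfo=si)
+    ResumeThread(hThread)  # also defined below; starts the suspended thread
+    return pid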
+
+# flags for CreateProcess
+CREATE_BREAKAWAY_FROM_JOB = 0x01000000
+CREATE_DEFAULT_ERROR_MODE = 0x04000000
+CREATE_NEW_CONSOLE = 0x00000010
+CREATE_NEW_PROCESS_GROUP = 0x00000200
+CREATE_NO_WINDOW = 0x08000000
+CREATE_SUSPENDED = 0x00000004
+CREATE_UNICODE_ENVIRONMENT = 0x00000400
+
+# Flags for IOCompletion ports (some of these would probably be defined if
+# we used the win32 extensions for python, but we don't want to do that if
+# we can help it).
+INVALID_HANDLE_VALUE = HANDLE(-1) # From winbase.h
+
+# Self Defined Constants for IOPort <--> Job Object communication
+COMPKEY_TERMINATE = c_ulong(0)
+COMPKEY_JOBOBJECT = c_ulong(1)
+
+# flags for job limit information
+# see http://msdn.microsoft.com/en-us/library/ms684147%28VS.85%29.aspx
+JOB_OBJECT_LIMIT_BREAKAWAY_OK = 0x00000800
+JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK = 0x00001000
+
+# Flags for Job Object Completion Port Message IDs from winnt.h
+# See also: http://msdn.microsoft.com/en-us/library/ms684141%28v=vs.85%29.aspx
+JOB_OBJECT_MSG_END_OF_JOB_TIME = 1
+JOB_OBJECT_MSG_END_OF_PROCESS_TIME = 2
+JOB_OBJECT_MSG_ACTIVE_PROCESS_LIMIT = 3
+JOB_OBJECT_MSG_ACTIVE_PROCESS_ZERO = 4
+JOB_OBJECT_MSG_NEW_PROCESS = 6
+JOB_OBJECT_MSG_EXIT_PROCESS = 7
+JOB_OBJECT_MSG_ABNORMAL_EXIT_PROCESS = 8
+JOB_OBJECT_MSG_PROCESS_MEMORY_LIMIT = 9
+JOB_OBJECT_MSG_JOB_MEMORY_LIMIT = 10
+
+# See winbase.h
+DEBUG_ONLY_THIS_PROCESS = 0x00000002
+DEBUG_PROCESS = 0x00000001
+DETACHED_PROCESS = 0x00000008
+
+# GetQueuedCompletionStatus - http://msdn.microsoft.com/en-us/library/aa364986%28v=vs.85%29.aspx
+GetQueuedCompletionStatusProto = WINFUNCTYPE(BOOL, # Return Type
+ HANDLE, # Completion Port
+ LPDWORD, # Msg ID
+ LPULONG, # Completion Key
+ LPULONG, # PID Returned from the call (may be null)
+ DWORD) # milliseconds to wait
+GetQueuedCompletionStatusFlags = ((1, "CompletionPort", INVALID_HANDLE_VALUE),
+ (1, "lpNumberOfBytes", None),
+ (1, "lpCompletionKey", None),
+ (1, "lpPID", None),
+ (1, "dwMilliseconds", 0))
+GetQueuedCompletionStatus = GetQueuedCompletionStatusProto(("GetQueuedCompletionStatus",
+ windll.kernel32),
+ GetQueuedCompletionStatusFlags)
+
+# CreateIOCompletionPort
+# Note that the completion key is just a number, not a pointer.
+CreateIoCompletionPortProto = WINFUNCTYPE(HANDLE, # Return Type
+ HANDLE, # File Handle
+ HANDLE, # Existing Completion Port
+ c_ulong, # Completion Key
+ DWORD # Number of Threads
+ )
+CreateIoCompletionPortFlags = ((1, "FileHandle", INVALID_HANDLE_VALUE),
+ (1, "ExistingCompletionPort", 0),
+ (1, "CompletionKey", c_ulong(0)),
+ (1, "NumberOfConcurrentThreads", 0))
+CreateIoCompletionPort = CreateIoCompletionPortProto(("CreateIoCompletionPort",
+ windll.kernel32),
+ CreateIoCompletionPortFlags)
+CreateIoCompletionPort.errcheck = ErrCheckHandle
+
+# SetInformationJobObject
+SetInformationJobObjectProto = WINFUNCTYPE(BOOL, # Return Type
+ HANDLE, # Job Handle
+ DWORD, # Type of Class next param is
+ LPVOID, # Job Object Class
+ DWORD # Job Object Class Length
+ )
+SetInformationJobObjectProtoFlags = ((1, "hJob", None),
+ (1, "JobObjectInfoClass", None),
+ (1, "lpJobObjectInfo", None),
+ (1, "cbJobObjectInfoLength", 0))
+SetInformationJobObject = SetInformationJobObjectProto(("SetInformationJobObject",
+ windll.kernel32),
+ SetInformationJobObjectProtoFlags)
+SetInformationJobObject.errcheck = ErrCheckBool
+
+# CreateJobObject()
+CreateJobObjectProto = WINFUNCTYPE(HANDLE, # Return type
+ LPVOID, # lpJobAttributes
+ LPCWSTR # lpName
+ )
+
+CreateJobObjectFlags = ((1, "lpJobAttributes", None),
+ (1, "lpName", None))
+
+CreateJobObject = CreateJobObjectProto(("CreateJobObjectW", windll.kernel32),
+ CreateJobObjectFlags)
+CreateJobObject.errcheck = ErrCheckHandle
+
+# AssignProcessToJobObject()
+
+AssignProcessToJobObjectProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hJob
+ HANDLE # hProcess
+ )
+AssignProcessToJobObjectFlags = ((1, "hJob"),
+ (1, "hProcess"))
+AssignProcessToJobObject = AssignProcessToJobObjectProto(
+ ("AssignProcessToJobObject", windll.kernel32),
+ AssignProcessToJobObjectFlags)
+AssignProcessToJobObject.errcheck = ErrCheckBool
+
+# GetCurrentProcess()
+# because os.getpid() is way too easy
+GetCurrentProcessProto = WINFUNCTYPE(HANDLE # Return type
+ )
+GetCurrentProcessFlags = ()
+GetCurrentProcess = GetCurrentProcessProto(
+ ("GetCurrentProcess", windll.kernel32),
+ GetCurrentProcessFlags)
+GetCurrentProcess.errcheck = ErrCheckHandle
+
+# IsProcessInJob()
+try:
+ IsProcessInJobProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # Process Handle
+ HANDLE, # Job Handle
+ LPBOOL # Result
+ )
+ IsProcessInJobFlags = ((1, "ProcessHandle"),
+ (1, "JobHandle", HANDLE(0)),
+ (2, "Result"))
+ IsProcessInJob = IsProcessInJobProto(
+ ("IsProcessInJob", windll.kernel32),
+ IsProcessInJobFlags)
+ IsProcessInJob.errcheck = ErrCheckBool
+except AttributeError:
+    # Windows 2000 doesn't have this API
+ def IsProcessInJob(process):
+ return False
+
+
+# ResumeThread()
+
+def ErrCheckResumeThread(result, func, args):
+ if result == -1:
+ raise WinError()
+
+ return args
+
+ResumeThreadProto = WINFUNCTYPE(DWORD, # Return type
+ HANDLE # hThread
+ )
+ResumeThreadFlags = ((1, "hThread"),)
+ResumeThread = ResumeThreadProto(("ResumeThread", windll.kernel32),
+ ResumeThreadFlags)
+ResumeThread.errcheck = ErrCheckResumeThread
+
+# TerminateProcess()
+
+TerminateProcessProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hProcess
+ UINT # uExitCode
+ )
+TerminateProcessFlags = ((1, "hProcess"),
+ (1, "uExitCode", 127))
+TerminateProcess = TerminateProcessProto(
+ ("TerminateProcess", windll.kernel32),
+ TerminateProcessFlags)
+TerminateProcess.errcheck = ErrCheckBool
+
+# TerminateJobObject()
+
+TerminateJobObjectProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hJob
+ UINT # uExitCode
+ )
+TerminateJobObjectFlags = ((1, "hJob"),
+ (1, "uExitCode", 127))
+TerminateJobObject = TerminateJobObjectProto(
+ ("TerminateJobObject", windll.kernel32),
+ TerminateJobObjectFlags)
+TerminateJobObject.errcheck = ErrCheckBool
+
+# WaitForSingleObject()
+
+WaitForSingleObjectProto = WINFUNCTYPE(DWORD, # Return type
+ HANDLE, # hHandle
+ DWORD, # dwMilliseconds
+ )
+WaitForSingleObjectFlags = ((1, "hHandle"),
+ (1, "dwMilliseconds", -1))
+WaitForSingleObject = WaitForSingleObjectProto(
+ ("WaitForSingleObject", windll.kernel32),
+ WaitForSingleObjectFlags)
+
+# http://msdn.microsoft.com/en-us/library/ms681381%28v=vs.85%29.aspx
+INFINITE = -1
+WAIT_TIMEOUT = 0x0102
+WAIT_OBJECT_0 = 0x0
+WAIT_ABANDONED = 0x0080
+
+# http://msdn.microsoft.com/en-us/library/ms683189%28VS.85%29.aspx
+STILL_ACTIVE = 259
+
+# Used when we terminate a process.
+ERROR_CONTROL_C_EXIT = 0x23c
+
+# GetExitCodeProcess()
+
+GetExitCodeProcessProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hProcess
+ LPDWORD, # lpExitCode
+ )
+GetExitCodeProcessFlags = ((1, "hProcess"),
+ (2, "lpExitCode"))
+GetExitCodeProcess = GetExitCodeProcessProto(
+ ("GetExitCodeProcess", windll.kernel32),
+ GetExitCodeProcessFlags)
+GetExitCodeProcess.errcheck = ErrCheckBool
+
+def CanCreateJobObject():
+ currentProc = GetCurrentProcess()
+ if IsProcessInJob(currentProc):
+ jobinfo = QueryInformationJobObject(HANDLE(0), 'JobObjectExtendedLimitInformation')
+ limitflags = jobinfo['BasicLimitInformation']['LimitFlags']
+        return bool(limitflags & (JOB_OBJECT_LIMIT_BREAKAWAY_OK |
+                                  JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK))
+ else:
+ return True
+
+### testing functions
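+
+# NOTE (added for clarity): parent() and child() below assume that `sys` and
+# a job-object-aware `Popen` are available in this namespace; as written,
+# this module does not import either of them.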
+
+def parent():
+ print 'Starting parent'
+ currentProc = GetCurrentProcess()
+ if IsProcessInJob(currentProc):
+ print >> sys.stderr, "You should not be in a job object to test"
+ sys.exit(1)
+ assert CanCreateJobObject()
+ print 'File: %s' % __file__
+ command = [sys.executable, __file__, '-child']
+ print 'Running command: %s' % command
+ process = Popen(command)
+ process.kill()
+ code = process.returncode
+ print 'Child code: %s' % code
+ assert code == 127
+
+def child():
+ print 'Starting child'
+ currentProc = GetCurrentProcess()
+ injob = IsProcessInJob(currentProc)
+ print "Is in a job?: %s" % injob
+ can_create = CanCreateJobObject()
+ print 'Can create job?: %s' % can_create
+ process = Popen('c:\\windows\\notepad.exe')
+ assert process._job
+ jobinfo = QueryInformationJobObject(process._job, 'JobObjectExtendedLimitInformation')
+ print 'Job info: %s' % jobinfo
+ limitflags = jobinfo['BasicLimitInformation']['LimitFlags']
+ print 'LimitFlags: %s' % limitflags
+ process.kill()
diff --git a/testing/mozharness/mozprocess/wpk.py b/testing/mozharness/mozprocess/wpk.py
new file mode 100644
index 000000000..a86f9bf22
--- /dev/null
+++ b/testing/mozharness/mozprocess/wpk.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from ctypes import sizeof, windll, addressof, c_wchar, create_unicode_buffer
+from ctypes.wintypes import DWORD, HANDLE
+
+PROCESS_TERMINATE = 0x0001
+PROCESS_QUERY_INFORMATION = 0x0400
+PROCESS_VM_READ = 0x0010
+
+def get_pids(process_name):
+ BIG_ARRAY = DWORD * 4096
+ processes = BIG_ARRAY()
+ needed = DWORD()
+
+ pids = []
+ result = windll.psapi.EnumProcesses(processes,
+ sizeof(processes),
+ addressof(needed))
+ if not result:
+ return pids
+
+    num_results = needed.value // sizeof(DWORD)
+
+ for i in range(num_results):
+ pid = processes[i]
+ process = windll.kernel32.OpenProcess(PROCESS_QUERY_INFORMATION |
+ PROCESS_VM_READ,
+ 0, pid)
+ if process:
+ module = HANDLE()
+ result = windll.psapi.EnumProcessModules(process,
+ addressof(module),
+ sizeof(module),
+ addressof(needed))
+ if result:
+ name = create_unicode_buffer(1024)
+ result = windll.psapi.GetModuleBaseNameW(process, module,
+ name, len(name))
+ # TODO: This might not be the best way to
+ # match a process name; maybe use a regexp instead.
+ if name.value.startswith(process_name):
+ pids.append(pid)
+ windll.kernel32.CloseHandle(module)
+ windll.kernel32.CloseHandle(process)
+
+ return pids
+
+def kill_pid(pid):
+ process = windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
+ if process:
+ windll.kernel32.TerminateProcess(process, 0)
+ windll.kernel32.CloseHandle(process)
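+
+# Illustrative sketch (added; the name prefix is an assumption): terminate
+# every process whose main module name starts with the given prefix.
+def _example_kill_by_name(process_name=u'notepad'):
+    for pid in get_pids(process_name):
+        kill_pid(pid)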
diff --git a/testing/mozharness/requirements.txt b/testing/mozharness/requirements.txt
new file mode 100644
index 000000000..632355c54
--- /dev/null
+++ b/testing/mozharness/requirements.txt
@@ -0,0 +1,25 @@
+# These packages are needed for mozharness unit tests.
+# Output from 'pip freeze'; we may be able to use other versions of the below packages.
+Cython==0.14.1
+Fabric==1.6.0
+coverage==3.6
+distribute==0.6.35
+dulwich==0.8.7
+hg-git==0.4.0
+logilab-astng==0.24.2
+logilab-common==0.59.0
+mercurial==3.7.3
+mock==1.0.1
+nose==1.2.1
+ordereddict==1.1
+paramiko==1.10.0
+pycrypto==2.6
+pyflakes==0.6.1
+pylint==0.27.0
+simplejson==2.1.1
+unittest2==0.5.1
+virtualenv==1.5.1
+wsgiref==0.1.2
+urllib3==1.9.1
+google-api-python-client==1.5.1
+oauth2client==1.4.2
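+
+# Intended use (a sketch; run from the source tree root):
+#   pip install -r testing/mozharness/requirements.txt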
diff --git a/testing/mozharness/scripts/android_emulator_unittest.py b/testing/mozharness/scripts/android_emulator_unittest.py
new file mode 100644
index 000000000..2d17b9cb6
--- /dev/null
+++ b/testing/mozharness/scripts/android_emulator_unittest.py
@@ -0,0 +1,755 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import copy
+import datetime
+import glob
+import os
+import re
+import sys
+import signal
+import socket
+import subprocess
+import telnetlib
+import time
+import tempfile
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozprocess import ProcessHandler
+
+from mozharness.base.log import FATAL
+from mozharness.base.script import BaseScript, PreScriptAction, PostScriptAction
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.mozbase import MozbaseMixin
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+from mozharness.mozilla.testing.unittest import EmulatorMixin
+
+
+class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript, MozbaseMixin):
+ config_options = [[
+ ["--test-suite"],
+ {"action": "store",
+ "dest": "test_suite",
+ }
+ ], [
+ ["--adb-path"],
+ {"action": "store",
+ "dest": "adb_path",
+ "default": None,
+ "help": "Path to adb",
+ }
+ ], [
+ ["--total-chunk"],
+ {"action": "store",
+ "dest": "total_chunks",
+ "default": None,
+ "help": "Number of total chunks",
+ }
+ ], [
+ ["--this-chunk"],
+ {"action": "store",
+ "dest": "this_chunk",
+ "default": None,
+ "help": "Number of this chunk",
+ }
+ ]] + copy.deepcopy(testing_config_options) + \
+ copy.deepcopy(blobupload_config_options)
+
+ error_list = [
+ ]
+
+ virtualenv_requirements = [
+ ]
+
+ virtualenv_modules = [
+ ]
+
+ app_name = None
+
+ def __init__(self, require_config_file=False):
+ super(AndroidEmulatorTest, self).__init__(
+ config_options=self.config_options,
+ all_actions=['clobber',
+ 'read-buildbot-config',
+ 'setup-avds',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'install',
+ 'run-tests',
+ ],
+ default_actions=['clobber',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'install',
+ 'run-tests',
+ ],
+ require_config_file=require_config_file,
+ config={
+ 'virtualenv_modules': self.virtualenv_modules,
+ 'virtualenv_requirements': self.virtualenv_requirements,
+ 'require_test_zip': True,
+ # IP address of the host as seen from the emulator
+ 'remote_webserver': '10.0.2.2',
+ }
+ )
+
+ # these are necessary since self.config is read only
+ c = self.config
+ abs_dirs = self.query_abs_dirs()
+ self.adb_path = self.query_exe('adb')
+ self.installer_url = c.get('installer_url')
+ self.installer_path = c.get('installer_path')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+ self.test_manifest = c.get('test_manifest')
+ self.robocop_path = os.path.join(abs_dirs['abs_work_dir'], "robocop.apk")
+ self.minidump_stackwalk_path = c.get("minidump_stackwalk_path")
+ self.emulator = c.get('emulator')
+ self.test_suite = c.get('test_suite')
+ self.this_chunk = c.get('this_chunk')
+ self.total_chunks = c.get('total_chunks')
+ if self.test_suite not in self.config["suite_definitions"]:
+ # accept old-style test suite name like "mochitest-3"
+            m = re.match(r"(.*)-(\d*)", self.test_suite)
+ if m:
+ self.test_suite = m.group(1)
+ if self.this_chunk is None:
+ self.this_chunk = m.group(2)
+ self.sdk_level = None
+ self.xre_path = None
+
+ def _query_tests_dir(self):
+ dirs = self.query_abs_dirs()
+ try:
+ test_dir = self.config["suite_definitions"][self.test_suite]["testsdir"]
+ except:
+ test_dir = self.test_suite
+ return os.path.join(dirs['abs_test_install_dir'], test_dir)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(AndroidEmulatorTest, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_test_install_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'tests')
+ dirs['abs_xre_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'hostutils')
+ dirs['abs_modules_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'modules')
+ dirs['abs_blob_upload_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+ dirs['abs_emulator_dir'] = abs_dirs['abs_work_dir']
+ dirs['abs_mochitest_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'mochitest')
+ dirs['abs_marionette_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'harness', 'marionette_harness')
+ dirs['abs_marionette_tests_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'tests', 'testing',
+ 'marionette', 'harness', 'marionette_harness', 'tests')
+ dirs['abs_avds_dir'] = self.config.get("avds_dir", "/home/cltbld/.android")
+
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+ requirements = None
+ if os.path.isdir(dirs['abs_mochitest_dir']):
+ # mochitest is the only thing that needs this
+ requirements = os.path.join(dirs['abs_mochitest_dir'],
+ 'websocketprocessbridge',
+ 'websocketprocessbridge_requirements.txt')
+ elif self.test_suite == 'marionette':
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config', 'marionette_requirements.txt')
+ if requirements:
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+
+ def _launch_emulator(self):
+ env = self.query_env()
+
+ # Set $LD_LIBRARY_PATH to self.dirs['abs_work_dir'] so that
+ # the emulator picks up the symlink to libGL.so.1 that we
+ # constructed in start_emulator.
+ env['LD_LIBRARY_PATH'] = self.abs_dirs['abs_work_dir']
+
+ # Set environment variables to help emulator find the AVD.
+ # In newer versions of the emulator, ANDROID_AVD_HOME should
+ # point to the 'avd' directory.
+ # For older versions of the emulator, ANDROID_SDK_HOME should
+ # point to the directory containing the '.android' directory
+ # containing the 'avd' directory.
+ avd_home_dir = self.abs_dirs['abs_avds_dir']
+ env['ANDROID_AVD_HOME'] = os.path.join(avd_home_dir, 'avd')
+ env['ANDROID_SDK_HOME'] = os.path.abspath(os.path.join(avd_home_dir, '..'))
+
+ command = [
+ "emulator", "-avd", self.emulator["name"],
+ "-port", str(self.emulator["emulator_port"]),
+ ]
+ if "emulator_extra_args" in self.config:
+ command += self.config["emulator_extra_args"].split()
+
+ tmp_file = tempfile.NamedTemporaryFile(mode='w')
+ tmp_stdout = open(tmp_file.name, 'w')
+ self.info("Created temp file %s." % tmp_file.name)
+ self.info("Trying to start the emulator with this command: %s" % ' '.join(command))
+ proc = subprocess.Popen(command, stdout=tmp_stdout, stderr=tmp_stdout, env=env)
+ return {
+ "process": proc,
+ "tmp_file": tmp_file,
+ }
+
+    def _retry(self, max_attempts, interval, func, description, max_time=0):
+ '''
+ Execute func until it returns True, up to max_attempts times, waiting for
+ interval seconds between each attempt. description is logged on each attempt.
+ If max_time is specified, no further attempts will be made once max_time
+ seconds have elapsed; this provides some protection for the case where
+ the run-time for func is long or highly variable.
+ '''
+ status = False
+ attempts = 0
+ if max_time > 0:
+            end_time = datetime.datetime.now() + datetime.timedelta(seconds=max_time)
+ else:
+ end_time = None
+ while attempts < max_attempts and not status:
+ if (end_time is not None) and (datetime.datetime.now() > end_time):
+ self.info("Maximum retry run-time of %d seconds exceeded; remaining attempts abandoned" % max_time)
+ break
+ if attempts != 0:
+ self.info("Sleeping %d seconds" % interval)
+ time.sleep(interval)
+ attempts += 1
+ self.info(">> %s: Attempt #%d of %d" % (description, attempts, max_attempts))
+ status = func()
+ return status
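+
+    # Illustrative sketch: retry a flaky check every 30 seconds, at most 4
+    # times, but give up once 5 minutes of wall-clock time have elapsed:
+    #   ok = self._retry(4, 30, self._verify_adb_device,
+    #                    "Verify adb device", max_time=300)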
+
+ def _run_with_timeout(self, timeout, cmd):
+ timeout_cmd = ['timeout', '%s' % timeout] + cmd
+ return self._run_proc(timeout_cmd)
+
+ def _run_proc(self, cmd):
+ self.info('Running %s' % subprocess.list2cmdline(cmd))
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ out, err = p.communicate()
+ if out:
+ self.info('%s' % str(out.strip()))
+ if err:
+ self.info('stderr: %s' % str(err.strip()))
+ return out
+
+ def _telnet_cmd(self, telnet, command):
+ telnet.write('%s\n' % command)
+ result = telnet.read_until('OK', 10)
+ self.info('%s: %s' % (command, result))
+ return result
+
+ def _verify_adb(self):
+ self.info('Verifying adb connectivity')
+ self._run_with_timeout(180, [self.adb_path, 'wait-for-device'])
+ return True
+
+ def _verify_adb_device(self):
+ out = self._run_with_timeout(30, [self.adb_path, 'devices'])
+ if (self.emulator['device_id'] in out) and ("device" in out):
+ return True
+ return False
+
+ def _is_boot_completed(self):
+ boot_cmd = [self.adb_path, '-s', self.emulator['device_id'],
+ 'shell', 'getprop', 'sys.boot_completed']
+ out = self._run_with_timeout(30, boot_cmd)
+ if out.strip() == '1':
+ return True
+ return False
+
+ def _telnet_to_emulator(self):
+ port = self.emulator["emulator_port"]
+ telnet_ok = False
+        tn = None
+        try:
+ tn = telnetlib.Telnet('localhost', port, 10)
+ if tn is not None:
+ self.info('Connected to port %d' % port)
+ res = tn.read_until('OK', 10)
+ self.info(res)
+ self._telnet_cmd(tn, 'avd status')
+ self._telnet_cmd(tn, 'redir list')
+ self._telnet_cmd(tn, 'network status')
+ tn.write('quit\n')
+ tn.read_all()
+ telnet_ok = True
+ else:
+ self.warning('Unable to connect to port %d' % port)
+        except socket.error, e:
+            self.info('Trying again after socket error: %s' % str(e))
+        except EOFError:
+            self.info('Trying again after EOF')
+        except:
+            self.info('Trying again after unexpected exception')
+ finally:
+ if tn is not None:
+ tn.close()
+ return telnet_ok
+
+ def _verify_emulator(self):
+ adb_ok = self._verify_adb()
+ if not adb_ok:
+ self.warning('Unable to communicate with adb')
+ return False
+ adb_device_ok = self._retry(4, 30, self._verify_adb_device, "Verify emulator visible to adb")
+ if not adb_device_ok:
+ self.warning('Unable to communicate with emulator via adb')
+ return False
+        boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed", max_time=330)
+ if not boot_ok:
+ self.warning('Unable to verify Android boot completion')
+ return False
+ telnet_ok = self._retry(4, 30, self._telnet_to_emulator, "Verify telnet to emulator")
+ if not telnet_ok:
+ self.warning('Unable to telnet to emulator on port %d' % self.emulator["emulator_port"])
+ return False
+ return True
+
+ def _verify_emulator_and_restart_on_fail(self):
+ emulator_ok = self._verify_emulator()
+ if not emulator_ok:
+ self._dump_host_state()
+ self._screenshot("emulator-startup-screenshot-")
+ self._kill_processes(self.config["emulator_process_name"])
+ self._run_proc(['ps', '-ef'])
+ self._dump_emulator_log()
+ # remove emulator tmp files
+ for dir in glob.glob("/tmp/android-*"):
+ self.rmtree(dir)
+ self._restart_adbd()
+ time.sleep(5)
+ self.emulator_proc = self._launch_emulator()
+ return emulator_ok
+
+ def _install_fennec_apk(self):
+ install_ok = False
+ if int(self.sdk_level) >= 23:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.installer_path]
+ else:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.installer_path]
+ out = self._run_with_timeout(300, cmd)
+ if 'Success' in out:
+ install_ok = True
+ return install_ok
+
+ def _install_robocop_apk(self):
+ install_ok = False
+ if int(self.sdk_level) >= 23:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.robocop_path]
+ else:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.robocop_path]
+ out = self._run_with_timeout(300, cmd)
+ if 'Success' in out:
+ install_ok = True
+ return install_ok
+
+ def _dump_host_state(self):
+ self._run_proc(['ps', '-ef'])
+ self._run_proc(['netstat', '-a', '-p', '-n', '-t', '-u'])
+
+ def _dump_emulator_log(self):
+ self.info("##### %s emulator log begins" % self.emulator["name"])
+ output = self.read_from_file(self.emulator_proc["tmp_file"].name, verbose=False)
+ if output:
+ self.info(output)
+ self.info("##### %s emulator log ends" % self.emulator["name"])
+
+ def _kill_processes(self, process_name):
+ p = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)
+ out, err = p.communicate()
+ self.info("Let's kill every process called %s" % process_name)
+ for line in out.splitlines():
+ if process_name in line:
+ pid = int(line.split(None, 1)[0])
+ self.info("Killing pid %d." % pid)
+ os.kill(pid, signal.SIGKILL)
+
+ def _restart_adbd(self):
+ self._run_with_timeout(30, [self.adb_path, 'kill-server'])
+ self._run_with_timeout(30, [self.adb_path, 'start-server'])
+
+ def _screenshot(self, prefix):
+ """
+ Save a screenshot of the entire screen to the blob upload directory.
+ """
+ dirs = self.query_abs_dirs()
+ utility = os.path.join(self.xre_path, "screentopng")
+ if not os.path.exists(utility):
+ self.warning("Unable to take screenshot: %s does not exist" % utility)
+ return
+ try:
+ tmpfd, filename = tempfile.mkstemp(prefix=prefix, suffix='.png',
+ dir=dirs['abs_blob_upload_dir'])
+ os.close(tmpfd)
+ self.info("Taking screenshot with %s; saving to %s" % (utility, filename))
+ subprocess.call([utility, filename], env=self.query_env())
+ except OSError, err:
+ self.warning("Failed to take screenshot: %s" % err.strerror)
+
+ def _query_package_name(self):
+ if self.app_name is None:
+            # Find the app name in package-name.txt; assumes download-and-extract
+            # has completed successfully.
+ apk_dir = self.abs_dirs['abs_work_dir']
+ self.apk_path = os.path.join(apk_dir, self.installer_path)
+ unzip = self.query_exe("unzip")
+ package_path = os.path.join(apk_dir, 'package-name.txt')
+ unzip_cmd = [unzip, '-q', '-o', self.apk_path]
+ self.run_command(unzip_cmd, cwd=apk_dir, halt_on_failure=True)
+ self.app_name = str(self.read_from_file(package_path, verbose=True)).rstrip()
+ return self.app_name
+
+ def preflight_install(self):
+ # in the base class, this checks for mozinstall, but we don't use it
+ pass
+
+ def _build_command(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ if self.test_suite not in self.config["suite_definitions"]:
+ self.fatal("Key '%s' not defined in the config!" % self.test_suite)
+
+ cmd = [
+ self.query_python_path('python'),
+ '-u',
+ os.path.join(
+ self._query_tests_dir(),
+ self.config["suite_definitions"][self.test_suite]["run_filename"]
+ ),
+ ]
+
+ raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_raw.log' % self.test_suite)
+
+ error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_errorsummary.log' % self.test_suite)
+ str_format_values = {
+ 'app': self._query_package_name(),
+ 'remote_webserver': c['remote_webserver'],
+ 'xre_path': self.xre_path,
+ 'utility_path': self.xre_path,
+ 'http_port': self.emulator['http_port'],
+ 'ssl_port': self.emulator['ssl_port'],
+ 'certs_path': os.path.join(dirs['abs_work_dir'], 'tests/certs'),
+ # TestingMixin._download_and_extract_symbols() will set
+ # self.symbols_path when downloading/extracting.
+ 'symbols_path': self.symbols_path,
+ 'modules_dir': dirs['abs_modules_dir'],
+ 'installer_path': self.installer_path,
+ 'raw_log_file': raw_log_file,
+ 'error_summary_file': error_summary_file,
+ 'dm_trans': c['device_manager'],
+ # marionette options
+ 'address': c.get('marionette_address'),
+ 'gecko_log': os.path.join(dirs["abs_blob_upload_dir"], 'gecko.log'),
+ 'test_manifest': os.path.join(
+ dirs['abs_marionette_tests_dir'],
+ self.config.get('marionette_test_manifest', '')
+ ),
+ }
+ for option in self.config["suite_definitions"][self.test_suite]["options"]:
+ opt = option.split('=')[0]
+ # override configured chunk options with script args, if specified
+ if opt == '--this-chunk' and self.this_chunk is not None:
+ continue
+ if opt == '--total-chunks' and self.total_chunks is not None:
+ continue
+ cmd.extend([option % str_format_values])
+
+ if self.this_chunk is not None:
+ cmd.extend(['--this-chunk', self.this_chunk])
+ if self.total_chunks is not None:
+ cmd.extend(['--total-chunks', self.total_chunks])
+
+ try_options, try_tests = self.try_args(self.test_suite)
+ cmd.extend(try_options)
+ cmd.extend(self.query_tests_args(
+ self.config["suite_definitions"][self.test_suite].get("tests"),
+ None,
+ try_tests))
+
+ return cmd
+
+ def _get_repo_url(self, path):
+ """
+ Return a url for a file (typically a tooltool manifest) in this hg repo
+ and using this revision (or mozilla-central/default if repo/rev cannot
+ be determined).
+
+ :param path specifies the directory path to the file of interest.
+ """
+ if 'GECKO_HEAD_REPOSITORY' in os.environ and 'GECKO_HEAD_REV' in os.environ:
+ # probably taskcluster
+ repo = os.environ['GECKO_HEAD_REPOSITORY']
+ revision = os.environ['GECKO_HEAD_REV']
+ elif self.buildbot_config and 'properties' in self.buildbot_config:
+ # probably buildbot
+ repo = 'https://hg.mozilla.org/%s' % self.buildbot_config['properties']['repo_path']
+ revision = self.buildbot_config['properties']['revision']
+ else:
+ # something unexpected!
+ repo = 'https://hg.mozilla.org/mozilla-central'
+ revision = 'default'
+ self.warning('Unable to find repo/revision for manifest; using mozilla-central/default')
+ url = '%s/raw-file/%s/%s' % (
+ repo,
+ revision,
+ path)
+ return url
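+
+    # Illustrative (values are assumptions): with GECKO_HEAD_REPOSITORY set to
+    # https://hg.mozilla.org/mozilla-central and GECKO_HEAD_REV set to abc123,
+    # _get_repo_url('relpath/releng.manifest') returns
+    # https://hg.mozilla.org/mozilla-central/raw-file/abc123/relpath/releng.manifest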
+
+ def _tooltool_fetch(self, url, dir):
+ c = self.config
+
+ manifest_path = self.download_file(
+ url,
+ file_name='releng.manifest',
+ parent_dir=dir
+ )
+
+ if not os.path.exists(manifest_path):
+ self.fatal("Could not retrieve manifest needed to retrieve "
+ "artifacts from %s" % manifest_path)
+
+ self.tooltool_fetch(manifest_path,
+ output_dir=dir,
+ cache=c.get("tooltool_cache", None))
+
+ ##########################################
+ ### Actions for AndroidEmulatorTest ###
+ ##########################################
+ def setup_avds(self):
+ '''
+ If tooltool cache mechanism is enabled, the cached version is used by
+ the fetch command. If the manifest includes an "unpack" field, tooltool
+ will unpack all compressed archives mentioned in the manifest.
+ '''
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+        # FIXME
+        # Clobbering and re-unpacking would not be needed if we had a way to
+        # check whether the unpacked content already present matches the
+        # contents of the tarball.
+ self.rmtree(dirs['abs_avds_dir'])
+ self.mkdir_p(dirs['abs_avds_dir'])
+ if 'avd_url' in c:
+ # Intended for experimental setups to evaluate an avd prior to
+ # tooltool deployment.
+ url = c['avd_url']
+ self.download_unpack(url, dirs['abs_avds_dir'])
+ else:
+ url = self._get_repo_url(c["tooltool_manifest_path"])
+ self._tooltool_fetch(url, dirs['abs_avds_dir'])
+
+ avd_home_dir = self.abs_dirs['abs_avds_dir']
+ if avd_home_dir != "/home/cltbld/.android":
+ # Modify the downloaded avds to point to the right directory.
+ cmd = [
+ 'bash', '-c',
+ 'sed -i "s|/home/cltbld/.android|%s|" %s/test-*.ini' %
+ (avd_home_dir, os.path.join(avd_home_dir, 'avd'))
+ ]
+ proc = ProcessHandler(cmd)
+ proc.run()
+ proc.wait()
+
+ def start_emulator(self):
+ '''
+ Starts the emulator
+ '''
+ if 'emulator_url' in self.config or 'emulator_manifest' in self.config or 'tools_manifest' in self.config:
+ self.install_emulator()
+
+ if not os.path.isfile(self.adb_path):
+ self.fatal("The adb binary '%s' is not a valid file!" % self.adb_path)
+ self._restart_adbd()
+
+ if not self.config.get("developer_mode"):
+ # We kill compiz because it sometimes prevents us from starting the emulator
+ self._kill_processes("compiz")
+ self._kill_processes("xpcshell")
+
+ # We add a symlink for libGL.so because the emulator dlopen()s it by that name
+ # even though the installed library on most systems without dev packages is
+ # libGL.so.1
+ linkfile = os.path.join(self.abs_dirs['abs_work_dir'], "libGL.so")
+ self.info("Attempting to establish symlink for %s" % linkfile)
+ try:
+ os.unlink(linkfile)
+ except OSError:
+ pass
+ for libdir in ["/usr/lib/x86_64-linux-gnu/mesa",
+ "/usr/lib/i386-linux-gnu/mesa",
+ "/usr/lib/mesa"]:
+ libfile = os.path.join(libdir, "libGL.so.1")
+ if os.path.exists(libfile):
+ self.info("Symlinking %s -> %s" % (linkfile, libfile))
+ self.mkdir_p(self.abs_dirs['abs_work_dir'])
+ os.symlink(libfile, linkfile)
+ break
+ self.emulator_proc = self._launch_emulator()
+
+ def verify_emulator(self):
+ '''
+ Check to see if the emulator can be contacted via adb and telnet.
+ If any communication attempt fails, kill the emulator, re-launch, and re-check.
+ '''
+ self.mkdir_p(self.query_abs_dirs()['abs_blob_upload_dir'])
+ max_restarts = 5
+ emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail, "Check emulator")
+ if not emulator_ok:
+ self.fatal('INFRA-ERROR: Unable to start emulator after %d attempts' % max_restarts)
+ # Start logcat for the emulator. The adb process runs until the
+ # corresponding emulator is killed. Output is written directly to
+ # the blobber upload directory so that it is uploaded automatically
+ # at the end of the job.
+ logcat_filename = 'logcat-%s.log' % self.emulator["device_id"]
+ logcat_path = os.path.join(self.abs_dirs['abs_blob_upload_dir'], logcat_filename)
+ logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S ExchangeService:S > %s &' % \
+ (self.adb_path, self.emulator["device_id"], logcat_path)
+ self.info(logcat_cmd)
+ os.system(logcat_cmd)
+ # Get a post-boot emulator process list for diagnostics
+ ps_cmd = [self.adb_path, '-s', self.emulator["device_id"], 'shell', 'ps']
+ self._run_with_timeout(30, ps_cmd)
+
+ def download_and_extract(self):
+ """
+ Download and extract fennec APK, tests.zip, host utils, and robocop (if required).
+ """
+ super(AndroidEmulatorTest, self).download_and_extract(suite_categories=[self.test_suite])
+ dirs = self.query_abs_dirs()
+ if self.test_suite.startswith('robocop'):
+ robocop_url = self.installer_url[:self.installer_url.rfind('/')] + '/robocop.apk'
+ self.info("Downloading robocop...")
+ self.download_file(robocop_url, 'robocop.apk', dirs['abs_work_dir'], error_level=FATAL)
+ self.rmtree(dirs['abs_xre_dir'])
+ self.mkdir_p(dirs['abs_xre_dir'])
+ if self.config["hostutils_manifest_path"]:
+ url = self._get_repo_url(self.config["hostutils_manifest_path"])
+ self._tooltool_fetch(url, dirs['abs_xre_dir'])
+ for p in glob.glob(os.path.join(dirs['abs_xre_dir'], 'host-utils-*')):
+ if os.path.isdir(p) and os.path.isfile(os.path.join(p, 'xpcshell')):
+ self.xre_path = p
+ if not self.xre_path:
+ self.fatal("xre path not found in %s" % dirs['abs_xre_dir'])
+ else:
+ self.fatal("configure hostutils_manifest_path!")
+
+ def install(self):
+ """
+ Install APKs on the emulator
+ """
+ assert self.installer_path is not None, \
+ "Either add installer_path to the config or use --installer-path."
+ install_needed = self.config["suite_definitions"][self.test_suite].get("install")
+        if install_needed is False:
+ self.info("Skipping apk installation for %s" % self.test_suite)
+ return
+
+ self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s', self.emulator['device_id'],
+ 'shell', 'getprop', 'ro.build.version.sdk'])
+
+ # Install Fennec
+ install_ok = self._retry(3, 30, self._install_fennec_apk, "Install Fennec APK")
+ if not install_ok:
+ self.fatal('INFRA-ERROR: Failed to install %s on %s' % (self.installer_path, self.emulator["name"]))
+
+ # Install Robocop if required
+ if self.test_suite.startswith('robocop'):
+ install_ok = self._retry(3, 30, self._install_robocop_apk, "Install Robocop APK")
+ if not install_ok:
+ self.fatal('INFRA-ERROR: Failed to install %s on %s' % (self.robocop_path, self.emulator["name"]))
+
+ self.info("Finished installing apps for %s" % self.emulator["name"])
+
+ def run_tests(self):
+ """
+ Run the tests
+ """
+ cmd = self._build_command()
+
+ try:
+ cwd = self._query_tests_dir()
+ except:
+ self.fatal("Don't know how to run --test-suite '%s'!" % self.test_suite)
+ env = self.query_env()
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+
+ self.info("Running on %s the command %s" % (self.emulator["name"], subprocess.list2cmdline(cmd)))
+ self.info("##### %s log begins" % self.test_suite)
+
+ # TinderBoxPrintRe does not know about the '-debug' categories
+ aliases = {
+ 'reftest-debug': 'reftest',
+ 'jsreftest-debug': 'jsreftest',
+ 'crashtest-debug': 'crashtest',
+ }
+ suite_category = aliases.get(self.test_suite, self.test_suite)
+ parser = self.get_test_output_parser(
+ suite_category,
+ config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list)
+ self.run_command(cmd, cwd=cwd, env=env, output_parser=parser)
+ tbpl_status, log_level = parser.evaluate_parser(0)
+ parser.append_tinderboxprint_line(self.test_suite)
+
+ self.info("##### %s log ends" % self.test_suite)
+ self._dump_emulator_log()
+ self.buildbot_status(tbpl_status, level=log_level)
+
+ @PostScriptAction('run-tests')
+ def stop_emulator(self, action, success=None):
+ '''
+ Report emulator health, then make sure that the emulator has been stopped
+ '''
+ self._verify_emulator()
+ self._kill_processes(self.config["emulator_process_name"])
+
+ def upload_blobber_files(self):
+ '''
+ Override BlobUploadMixin.upload_blobber_files to ensure emulator is killed
+ first (if the emulator is still running, logcat may still be running, which
+ may lock the blob upload directory, causing a hang).
+ '''
+ if self.config.get('blob_upload_branch'):
+ # Except on interactive workers, we want the emulator to keep running
+ # after the script is finished. So only kill it if blobber would otherwise
+ # have run anyway (it doesn't get run on interactive workers).
+ self._kill_processes(self.config["emulator_process_name"])
+ super(AndroidEmulatorTest, self).upload_blobber_files()
+
+if __name__ == '__main__':
+ emulatorTest = AndroidEmulatorTest()
+ emulatorTest.run_and_exit()
diff --git a/testing/mozharness/scripts/bouncer_submitter.py b/testing/mozharness/scripts/bouncer_submitter.py
new file mode 100755
index 000000000..eaa43e851
--- /dev/null
+++ b/testing/mozharness/scripts/bouncer_submitter.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.bouncer.submitter import BouncerSubmitterMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+
+
+class BouncerSubmitter(BaseScript, PurgeMixin, BouncerSubmitterMixin, BuildbotMixin):
+ config_options = [
+ [["--repo"], {
+ "dest": "repo",
+ "help": "Specify source repo, e.g. releases/mozilla-beta",
+ }],
+ [["--revision"], {
+ "dest": "revision",
+ "help": "Source revision/tag used to fetch shipped-locales",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Current version",
+ }],
+ [["--previous-version"], {
+ "dest": "prev_versions",
+ "action": "extend",
+ "help": "Previous version(s)",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of version",
+ }],
+ [["--bouncer-api-prefix"], {
+ "dest": "bouncer-api-prefix",
+ "help": "Bouncer admin API URL prefix",
+ }],
+ [["--credentials-file"], {
+ "dest": "credentials_file",
+ "help": "File containing Bouncer credentials",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ # other stuff
+ all_actions=[
+ 'clobber',
+ 'download-shipped-locales',
+ 'submit',
+ ],
+ default_actions=[
+ 'clobber',
+ 'download-shipped-locales',
+ 'submit',
+ ],
+ config={
+ 'buildbot_json_path' : 'buildprops.json'
+ }
+ )
+ self.locales = None
+ self.credentials = None
+
+ def _pre_config_lock(self, rw_config):
+ super(BouncerSubmitter, self)._pre_config_lock(rw_config)
+
+        # Override config properties with buildbot properties, as taskcluster defines them.
+ self.read_buildbot_config()
+
+        # Check that release promotion is enabled before overwriting these properties.
+ if self.buildbot_config["properties"].get("release_promotion"):
+ for prop in ['product', 'version', 'build_number', 'revision', 'bouncer_submitter_config', ]:
+ if self.buildbot_config["properties"].get(prop):
+ self.info("Overriding %s with %s" % (prop, self.buildbot_config["properties"].get(prop)))
+ self.config[prop] = self.buildbot_config["properties"].get(prop)
+ if self.buildbot_config["properties"].get("partial_versions"):
+ self.config["prev_versions"] = self.buildbot_config["properties"].get("partial_versions").split(", ")
+
+ for opt in ["version", "credentials_file", "bouncer-api-prefix"]:
+ if opt not in self.config:
+ self.fatal("%s must be specified" % opt)
+ if self.need_shipped_locales():
+ for opt in ["shipped-locales-url", "repo", "revision"]:
+ if opt not in self.config:
+ self.fatal("%s must be specified" % opt)
+
+ def need_shipped_locales(self):
+ return any(e.get("add-locales") for e in
+ self.config["products"].values())
+
+ def query_shipped_locales_path(self):
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs["abs_work_dir"], "shipped-locales")
+
+ def download_shipped_locales(self):
+ if not self.need_shipped_locales():
+ self.info("No need to download shipped-locales")
+ return
+
+ replace_dict = {"revision": self.config["revision"],
+ "repo": self.config["repo"]}
+ url = self.config["shipped-locales-url"] % replace_dict
+ dirs = self.query_abs_dirs()
+ self.mkdir_p(dirs["abs_work_dir"])
+ if not self.download_file(url=url,
+ file_name=self.query_shipped_locales_path()):
+ self.fatal("Unable to fetch shipped-locales from %s" % url)
+ # populate the list
+ self.load_shipped_locales()
+
+ def load_shipped_locales(self):
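+        # shipped-locales lines look like "af" or "ja-JP-mac osx"; only the
+        # first token (the locale code) on each line is kept.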
+ if self.locales:
+ return self.locales
+ content = self.read_from_file(self.query_shipped_locales_path())
+ locales = []
+        for line in content.splitlines():
+            # line.split()[0] would raise IndexError on a blank line
+            words = line.split()
+            if words:
+                locales.append(words[0])
+        self.locales = locales
+ return self.locales
+
+ def submit(self):
+ subs = {
+ "version": self.config["version"]
+ }
+ if self.config.get("build_number"):
+ subs["build_number"] = self.config["build_number"]
+
+ for product, pr_config in sorted(self.config["products"].items()):
+ product_name = pr_config["product-name"] % subs
+ if self.product_exists(product_name):
+ self.warning("Product %s already exists. Skipping..." %
+ product_name)
+ continue
+ self.info("Adding %s..." % product)
+ self.api_add_product(
+ product_name=product_name,
+ add_locales=pr_config.get("add-locales"),
+ ssl_only=pr_config.get("ssl-only"))
+ self.info("Adding paths...")
+ for platform, pl_config in sorted(pr_config["paths"].items()):
+ bouncer_platform = pl_config["bouncer-platform"]
+ path = pl_config["path"] % subs
+ self.info("%s (%s): %s" % (platform, bouncer_platform, path))
+ self.api_add_location(product_name, bouncer_platform, path)
+
+ # Add partial updates
+ if "partials" in self.config and self.config.get("prev_versions"):
+ self.submit_partials()
+
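+    # Illustrative sketch (hypothetical values): a products entry such as
+    #
+    #   "product-name": "Firefox-%(version)s"
+    #
+    # combined with subs = {"version": "56.0"} expands via the "%" operator
+    # above to the bouncer product name "Firefox-56.0".
+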
+ def submit_partials(self):
+ subs = {
+ "version": self.config["version"]
+ }
+ if self.config.get("build_number"):
+ subs["build_number"] = self.config["build_number"]
+ prev_versions = self.config.get("prev_versions")
+ for product, part_config in sorted(self.config["partials"].items()):
+ product_name_tmpl = part_config["product-name"]
+ for prev_version in prev_versions:
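+                # each entry is expected in "<version>build<N>" form,
+                # e.g. "55.0build3"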
+ prev_version, prev_build_number = prev_version.split("build")
+ subs["prev_version"] = prev_version
+ subs["prev_build_number"] = prev_build_number
+ product_name = product_name_tmpl % subs
+ if self.product_exists(product_name):
+ self.warning("Product %s already exists. Skipping..." %
+ product_name)
+ continue
+ self.info("Adding partial updates for %s" % product_name)
+ self.api_add_product(
+ product_name=product_name,
+ add_locales=part_config.get("add-locales"),
+ ssl_only=part_config.get("ssl-only"))
+ for platform, pl_config in sorted(part_config["paths"].items()):
+ bouncer_platform = pl_config["bouncer-platform"]
+ path = pl_config["path"] % subs
+ self.info("%s (%s): %s" % (platform, bouncer_platform, path))
+ self.api_add_location(product_name, bouncer_platform, path)
+
+
+if __name__ == '__main__':
+ myScript = BouncerSubmitter()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/configtest.py b/testing/mozharness/scripts/configtest.py
new file mode 100755
index 000000000..5db684f0a
--- /dev/null
+++ b/testing/mozharness/scripts/configtest.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""configtest.py
+
+Verify the .json and .py files in the configs/ directory are well-formed.
+Further tests to verify validity would be desirable.
+
+This is also a good example script to look at to understand mozharness.
+"""
+
+import os
+import pprint
+import sys
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+
+# ConfigTest {{{1
+class ConfigTest(BaseScript):
+ config_options = [[
+ ["--test-file",],
+ {"action": "extend",
+ "dest": "test_files",
+ "help": "Specify which config files to test"
+ }
+ ]]
+
+ def __init__(self, require_config_file=False):
+ self.config_files = []
+ BaseScript.__init__(self, config_options=self.config_options,
+ all_actions=['list-config-files',
+ 'test-json-configs',
+ 'test-python-configs',
+ 'summary',
+ ],
+ default_actions=['test-json-configs',
+ 'test-python-configs',
+ 'summary',
+ ],
+ require_config_file=require_config_file)
+
+ def query_config_files(self):
+ """This query method, much like others, caches its runtime
+ settings in self.VAR so we don't have to figure out config_files
+ multiple times.
+ """
+ if self.config_files:
+ return self.config_files
+ c = self.config
+ if 'test_files' in c:
+ self.config_files = c['test_files']
+ return self.config_files
+ self.debug("No --test-file(s) specified; defaulting to crawling the configs/ directory.")
+ config_files = []
+ for root, dirs, files in os.walk(os.path.join(sys.path[0], "..",
+ "configs")):
+ for name in files:
+ # Hardcode =P
+ if name.endswith(".json") or name.endswith(".py"):
+ if not name.startswith("test_malformed"):
+ config_files.append(os.path.join(root, name))
+ self.config_files = config_files
+ return self.config_files
+
+ def list_config_files(self):
+ """ Non-default action that is mainly here to demonstrate how
+ non-default actions work in a mozharness script.
+ """
+ config_files = self.query_config_files()
+ for config_file in config_files:
+ self.info(config_file)
+
+ def test_json_configs(self):
+ """ Currently only "is this well-formed json?"
+
+ """
+ config_files = self.query_config_files()
+ filecount = [0, 0]
+ for config_file in config_files:
+ if config_file.endswith(".json"):
+ filecount[0] += 1
+ self.info("Testing %s." % config_file)
+ contents = self.read_from_file(config_file, verbose=False)
+ try:
+ json.loads(contents)
+ except ValueError:
+ self.add_summary("%s is invalid json." % config_file,
+ level="error")
+ self.error(pprint.pformat(sys.exc_info()[1]))
+ else:
+ self.info("Good.")
+ filecount[1] += 1
+ if filecount[0]:
+ self.add_summary("%d of %d json config files were good." %
+ (filecount[1], filecount[0]))
+ else:
+ self.add_summary("No json config files to test.")
+
+ def test_python_configs(self):
+ """Currently only "will this give me a config dictionary?"
+
+ """
+ config_files = self.query_config_files()
+ filecount = [0, 0]
+ for config_file in config_files:
+ if config_file.endswith(".py"):
+ filecount[0] += 1
+ self.info("Testing %s." % config_file)
+ global_dict = {}
+ local_dict = {}
+ try:
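+                    # note: execfile() is Python 2-only; a Python 3 port would
+                    # use exec(open(config_file).read(), global_dict, local_dict)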
+ execfile(config_file, global_dict, local_dict)
+            except Exception:
+ self.add_summary("%s is invalid python." % config_file,
+ level="error")
+ self.error(pprint.pformat(sys.exc_info()[1]))
+ else:
+ if 'config' in local_dict and isinstance(local_dict['config'], dict):
+ self.info("Good.")
+ filecount[1] += 1
+ else:
+ self.add_summary("%s is valid python, but doesn't create a config dictionary." %
+ config_file, level="error")
+ if filecount[0]:
+ self.add_summary("%d of %d python config files were good." %
+ (filecount[1], filecount[0]))
+ else:
+ self.add_summary("No python config files to test.")
+
+# __main__ {{{1
+if __name__ == '__main__':
+ config_test = ConfigTest()
+ config_test.run_and_exit()
diff --git a/testing/mozharness/scripts/desktop_l10n.py b/testing/mozharness/scripts/desktop_l10n.py
new file mode 100755
index 000000000..0626ce35b
--- /dev/null
+++ b/testing/mozharness/scripts/desktop_l10n.py
@@ -0,0 +1,1152 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""desktop_l10n.py
+
+This script manages Desktop repacks for nightly builds.
+"""
+import os
+import re
+import sys
+import time
+import shlex
+import subprocess
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import BaseErrorList, MakefileErrorList
+from mozharness.base.script import BaseScript
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.building.buildbase import MakeUploadOutputParser
+from mozharness.mozilla.l10n.locales import LocalesMixin
+from mozharness.mozilla.mar import MarMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.mozilla.signing import SigningMixin
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.mozilla.taskcluster_helper import Taskcluster
+from mozharness.base.python import VirtualenvMixin
+from mozharness.mozilla.mock import ERROR_MSGS
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+
+# needed by _map
+SUCCESS = 0
+FAILURE = 1
+
+SUCCESS_STR = "Success"
+FAILURE_STR = "Failed"
+
+# when running get_output_from_command, pymake has some extra output
+# that needs to be filtered out
+PyMakeIgnoreList = [
+ re.compile(r'''.*make\.py(?:\[\d+\])?: Entering directory'''),
+ re.compile(r'''.*make\.py(?:\[\d+\])?: Leaving directory'''),
+]
+
+
+# mandatory configuration options; without them, this script will not work.
+# it's a list of values that are already known before starting a build
+configuration_tokens = ('branch',
+ 'platform',
+ 'update_platform',
+ 'update_channel',
+ 'ssh_key_dir',
+ 'stage_product',
+ 'upload_environment',
+ )
+# some other values such as "%(version)s", "%(buildid)s", ...
+# are defined at run time and they cannot be enforced in the _pre_config_lock
+# phase
+runtime_config_tokens = ('buildid', 'version', 'locale', 'from_buildid',
+ 'abs_objdir', 'abs_merge_dir', 'revision',
+ 'to_buildid', 'en_us_binary_url', 'mar_tools_url',
+ 'post_upload_extra', 'who')
+
+# DesktopSingleLocale {{{1
+class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
+ VCSMixin, SigningMixin, PurgeMixin, BaseScript,
+ BalrogMixin, MarMixin, VirtualenvMixin, TransferMixin):
+ """Manages desktop repacks"""
+ config_options = [[
+ ['--balrog-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the balrog configuration file"}
+ ], [
+ ['--branch-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the branch configuration file"}
+ ], [
+ ['--environment-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the environment (staging, production, ...) configuration file"}
+ ], [
+ ['--platform-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the platform configuration file"}
+ ], [
+ ['--locale', ],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to sign and update. Optionally pass"
+ " revision separated by colon, en-GB:default."}
+ ], [
+ ['--locales-file', ],
+ {"action": "store",
+ "dest": "locales_file",
+ "type": "string",
+ "help": "Specify a file to determine which locales to sign and update"}
+ ], [
+ ['--tag-override', ],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"}
+ ], [
+ ['--revision', ],
+ {"action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "Override the gecko revision to use (otherwise use buildbot supplied"
+ " value, or en-US revision) "}
+ ], [
+ ['--user-repo-override', ],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"}
+ ], [
+ ['--release-config-file', ],
+ {"action": "store",
+ "dest": "release_config_file",
+ "type": "string",
+ "help": "Specify the release config file to use"}
+ ], [
+ ['--this-chunk', ],
+ {"action": "store",
+ "dest": "this_locale_chunk",
+ "type": "int",
+ "help": "Specify which chunk of locales to run"}
+ ], [
+ ['--total-chunks', ],
+ {"action": "store",
+ "dest": "total_locale_chunks",
+ "type": "int",
+ "help": "Specify the total number of chunks of locales"}
+ ], [
+ ['--en-us-installer-url', ],
+ {"action": "store",
+ "dest": "en_us_installer_url",
+ "type": "string",
+ "help": "Specify the url of the en-us binary"}
+ ], [
+ ["--disable-mock"], {
+ "dest": "disable_mock",
+ "action": "store_true",
+ "help": "do not run under mock despite what gecko-config says"}
+ ]]
+
+ def __init__(self, require_config_file=True):
+ # fxbuild style:
+ buildscript_kwargs = {
+ 'all_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "funsize-props",
+ "submit-to-balrog",
+ "summary",
+ ],
+ 'config': {
+ "buildbot_json_path": "buildprops.json",
+ "ignore_locales": ["en-US"],
+ "locales_dir": "browser/locales",
+ "update_mar_dir": "dist/update",
+ "buildid_section": "App",
+ "buildid_option": "BuildID",
+ "application_ini": "application.ini",
+ "log_name": "single_locale",
+ "clobber_file": 'CLOBBER',
+ "appName": "Firefox",
+ "hashType": "sha512",
+ "taskcluster_credentials_file": "oauth.txt",
+ 'virtualenv_modules': [
+ 'requests==2.8.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.26',
+ ],
+ 'virtualenv_path': 'venv',
+ },
+ }
+ #
+
+ LocalesMixin.__init__(self)
+ BaseScript.__init__(
+ self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ **buildscript_kwargs
+ )
+
+ self.buildid = None
+ self.make_ident_output = None
+ self.bootstrap_env = None
+ self.upload_env = None
+ self.revision = None
+ self.enUS_revision = None
+ self.version = None
+ self.upload_urls = {}
+ self.locales_property = {}
+ self.package_urls = {}
+ self.pushdate = None
+ # upload_files is a dictionary of files to upload, keyed by locale.
+ self.upload_files = {}
+
+ if 'mock_target' in self.config:
+ self.enable_mock()
+
+ def _pre_config_lock(self, rw_config):
+ """replaces 'configuration_tokens' with their values, before the
+ configuration gets locked. If some of the configuration_tokens
+ are not present, stops the execution of the script"""
+        # since values such as branch and platform are mandatory, we can
+        # replace them in the configuration before it is locked down
+ # mandatory tokens
+ for token in configuration_tokens:
+ if token not in self.config:
+ self.fatal('No %s in configuration!' % token)
+
+ # all the important tokens are present in our configuration
+ for token in configuration_tokens:
+ # token_string '%(branch)s'
+ token_string = ''.join(('%(', token, ')s'))
+ # token_value => ash
+ token_value = self.config[token]
+ for element in self.config:
+ # old_value => https://hg.mozilla.org/projects/%(branch)s
+ old_value = self.config[element]
+ # new_value => https://hg.mozilla.org/projects/ash
+ new_value = self.__detokenise_element(self.config[element],
+ token_string,
+ token_value)
+ if new_value and new_value != old_value:
+ msg = "%s: replacing %s with %s" % (element,
+ old_value,
+ new_value)
+ self.debug(msg)
+ self.config[element] = new_value
+
+        # now, only runtime_config_tokens should be present in config;
+        # we should parse self.config and fail if we spot any other token
+ tokens_left = set(self._get_configuration_tokens(self.config))
+ unknown_tokens = set(tokens_left) - set(runtime_config_tokens)
+ if unknown_tokens:
+ msg = ['unknown tokens in configuration:']
+ for t in unknown_tokens:
+ msg.append(t)
+ self.fatal(' '.join(msg))
+ self.info('configuration looks ok')
+
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['mar_tools_url']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ def _get_configuration_tokens(self, iterable):
+ """gets a list of tokens in iterable"""
+        regex = re.compile(r'%\(\w+\)s')
+ results = []
+ try:
+ for element in iterable:
+ if isinstance(iterable, str):
+ # this is a string, look for tokens
+ # self.debug("{0}".format(re.findall(regex, element)))
+ tokens = re.findall(regex, iterable)
+ for token in tokens:
+ # clean %(branch)s => branch
+ # remove %(
+ token_name = token.partition('%(')[2]
+ # remove )s
+ token_name = token_name.partition(')s')[0]
+ results.append(token_name)
+ break
+
+ elif isinstance(iterable, (list, tuple)):
+ results.extend(self._get_configuration_tokens(element))
+
+ elif isinstance(iterable, dict):
+ results.extend(self._get_configuration_tokens(iterable[element]))
+
+ except TypeError:
+            # element is an int/float/..., nothing to do here
+ pass
+
+ # remove duplicates, and return results
+
+ return list(set(results))
+
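+    # For reference, a minimal sketch (hypothetical values) of what the token
+    # scan above yields on a nested configuration:
+    #
+    #   cfg = {"repo": "https://hg.mozilla.org/%(branch)s",
+    #          "opts": ["--rev", "%(revision)s"]}
+    #   self._get_configuration_tokens(cfg)  # -> ["branch", "revision"]
+    #
+    # (order is not guaranteed, since duplicates are stripped via a set)
+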
+ def __detokenise_element(self, config_option, token, value):
+ """reads config_options and returns a version of the same config_option
+ replacing token with value recursively"""
+ # config_option is a string, let's replace token with value
+ if isinstance(config_option, str):
+ # if token does not appear in this string,
+ # nothing happens and the original value is returned
+ return config_option.replace(token, value)
+ # it's a dictionary
+ elif isinstance(config_option, dict):
+ # replace token for each element of this dictionary
+ for element in config_option:
+ config_option[element] = self.__detokenise_element(
+ config_option[element], token, value)
+ return config_option
+ # it's a list
+ elif isinstance(config_option, list):
+ # create a new list and append the replaced elements
+ new_list = []
+ for element in config_option:
+ new_list.append(self.__detokenise_element(element, token, value))
+ return new_list
+ elif isinstance(config_option, tuple):
+ # create a new list and append the replaced elements
+ new_list = []
+ for element in config_option:
+ new_list.append(self.__detokenise_element(element, token, value))
+ return tuple(new_list)
+ else:
+ # everything else, bool, number, ...
+ return config_option
+
+ # Helper methods {{{2
+ def query_bootstrap_env(self):
+ """returns the env for repacks"""
+ if self.bootstrap_env:
+ return self.bootstrap_env
+ config = self.config
+ replace_dict = self.query_abs_dirs()
+
+ replace_dict['en_us_binary_url'] = config.get('en_us_binary_url')
+ self.read_buildbot_config()
+ # Override en_us_binary_url if packageUrl is passed as a property from
+ # the en-US build
+ if self.buildbot_config["properties"].get("packageUrl"):
+ packageUrl = self.buildbot_config["properties"]["packageUrl"]
+ # trim off the filename, the build system wants a directory
+ packageUrl = packageUrl.rsplit('/', 1)[0]
+ self.info("Overriding en_us_binary_url with %s" % packageUrl)
+ replace_dict['en_us_binary_url'] = str(packageUrl)
+ # Override en_us_binary_url if passed as a buildbot property
+ if self.buildbot_config["properties"].get("en_us_binary_url"):
+ self.info("Overriding en_us_binary_url with %s" %
+ self.buildbot_config["properties"]["en_us_binary_url"])
+ replace_dict['en_us_binary_url'] = \
+ str(self.buildbot_config["properties"]["en_us_binary_url"])
+ bootstrap_env = self.query_env(partial_env=config.get("bootstrap_env"),
+ replace_dict=replace_dict)
+ if 'MOZ_SIGNING_SERVERS' in os.environ:
+ sign_cmd = self.query_moz_sign_cmd(formats=None)
+ sign_cmd = subprocess.list2cmdline(sign_cmd)
+ # windows fix
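+            # (four literal backslashes per original backslash, presumably so
+            # one survives the extra rounds of make/shell unescaping)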
+ bootstrap_env['MOZ_SIGN_CMD'] = sign_cmd.replace('\\', '\\\\\\\\')
+ for binary in self._mar_binaries():
+ # "mar -> MAR" and 'mar.exe -> MAR' (windows)
+ name = binary.replace('.exe', '')
+ name = name.upper()
+ binary_path = os.path.join(self._mar_tool_dir(), binary)
+ # windows fix...
+ if binary.endswith('.exe'):
+ binary_path = binary_path.replace('\\', '\\\\\\\\')
+ bootstrap_env[name] = binary_path
+ if 'LOCALE_MERGEDIR' in bootstrap_env:
+ # windows fix
+ bootstrap_env['LOCALE_MERGEDIR'] = bootstrap_env['LOCALE_MERGEDIR'].replace('\\', '\\\\\\\\')
+ if self.query_is_nightly():
+ bootstrap_env["IS_NIGHTLY"] = "yes"
+ self.bootstrap_env = bootstrap_env
+ return self.bootstrap_env
+
+ def _query_upload_env(self):
+ """returns the environment used for the upload step"""
+ if self.upload_env:
+ return self.upload_env
+ config = self.config
+
+ replace_dict = {
+ 'buildid': self._query_buildid(),
+ 'version': self.query_version(),
+ 'post_upload_extra': ' '.join(config.get('post_upload_extra', [])),
+ 'upload_environment': config['upload_environment'],
+ }
+ if config['branch'] == 'try':
+ replace_dict.update({
+ 'who': self.query_who(),
+ 'revision': self._query_revision(),
+ })
+ upload_env = self.query_env(partial_env=config.get("upload_env"),
+ replace_dict=replace_dict)
+        # check if there are any extra options from the platform configuration
+ # and append them to the env
+
+ if 'upload_env_extra' in config:
+ for extra in config['upload_env_extra']:
+ upload_env[extra] = config['upload_env_extra'][extra]
+
+ self.upload_env = upload_env
+ return self.upload_env
+
+ def query_l10n_env(self):
+ l10n_env = self._query_upload_env().copy()
+ # both upload_env and bootstrap_env define MOZ_SIGN_CMD
+ # the one from upload_env is taken from os.environ, the one from
+ # bootstrap_env is set with query_moz_sign_cmd()
+        # we need to use the value provided by query_moz_sign_cmd or make
+        # upload will fail (signtool.py path is wrong)
+ l10n_env.update(self.query_bootstrap_env())
+ return l10n_env
+
+ def _query_make_ident_output(self):
+ """Get |make ident| output from the objdir.
+ Only valid after setup is run.
+ """
+ if self.make_ident_output:
+ return self.make_ident_output
+ dirs = self.query_abs_dirs()
+ self.make_ident_output = self._get_output_from_make(
+ target=["ident"],
+ cwd=dirs['abs_locales_dir'],
+ env=self.query_bootstrap_env())
+ return self.make_ident_output
+
+ def _query_buildid(self):
+ """Get buildid from the objdir.
+ Only valid after setup is run.
+ """
+ if self.buildid:
+ return self.buildid
+ r = re.compile(r"buildid (\d+)")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ match = r.match(line)
+ if match:
+ self.buildid = match.groups()[0]
+ return self.buildid
+
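+    # Sketch (hypothetical value): |make ident| output contains a line such as
+    #
+    #   buildid 20170612080311
+    #
+    # which the regex above reduces to the buildid string "20170612080311".
+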
+ def _query_revision(self):
+ """ Get the gecko revision in this order of precedence
+ * cached value
+ * command line arg --revision (development, taskcluster)
+ * buildbot properties (try with buildbot forced build)
+ * buildbot change (try with buildbot scheduler)
+ * from the en-US build (m-c & m-a)
+
+ This will fail the last case if the build hasn't been pulled yet.
+ """
+ if self.revision:
+ return self.revision
+
+ self.read_buildbot_config()
+ config = self.config
+ revision = None
+ if config.get("revision"):
+ revision = config["revision"]
+ elif 'revision' in self.buildbot_properties:
+ revision = self.buildbot_properties['revision']
+ elif (self.buildbot_config and
+ self.buildbot_config.get('sourcestamp', {}).get('revision')):
+ revision = self.buildbot_config['sourcestamp']['revision']
+ elif self.buildbot_config and self.buildbot_config.get('revision'):
+ revision = self.buildbot_config['revision']
+ elif config.get("update_gecko_source_to_enUS", True):
+ revision = self._query_enUS_revision()
+
+ if not revision:
+ self.fatal("Can't determine revision!")
+ self.revision = str(revision)
+ return self.revision
+
+ def _query_enUS_revision(self):
+ """Get revision from the objdir.
+ Only valid after setup is run.
+ """
+ if self.enUS_revision:
+ return self.enUS_revision
+ r = re.compile(r"^(gecko|fx)_revision ([0-9a-f]+\+?)$")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ match = r.match(line)
+ if match:
+ self.enUS_revision = match.groups()[1]
+ return self.enUS_revision
+
+ def _query_make_variable(self, variable, make_args=None,
+ exclude_lines=PyMakeIgnoreList):
+ """returns the value of make echo-variable-<variable>
+        it accepts extra make arguments (make_args)
+        it also accepts exclude_lines, a filter for the output;
+ exclude_lines defaults to PyMakeIgnoreList because
+ on windows, pymake writes extra output lines that need
+ to be filtered out.
+ """
+ dirs = self.query_abs_dirs()
+ make_args = make_args or []
+ exclude_lines = exclude_lines or []
+ target = ["echo-variable-%s" % variable] + make_args
+ cwd = dirs['abs_locales_dir']
+ raw_output = self._get_output_from_make(target, cwd=cwd,
+ env=self.query_bootstrap_env())
+ # we want to log all the messages from make/pymake and
+        # exclude some messages from the output ("Entering directory...")
+ output = []
+ for line in raw_output.split("\n"):
+ discard = False
+ for element in exclude_lines:
+ if element.match(line):
+ discard = True
+                    break
+ if not discard:
+ output.append(line.strip())
+ output = " ".join(output).strip()
+ self.info('echo-variable-%s: %s' % (variable, output))
+ return output
+
+ def query_version(self):
+ """Gets the version from the objdir.
+ Only valid after setup is run."""
+ if self.version:
+ return self.version
+ config = self.config
+ if config.get('release_config_file'):
+ release_config = self.query_release_config()
+ self.version = release_config['version']
+ else:
+ self.version = self._query_make_variable("MOZ_APP_VERSION")
+ return self.version
+
+ def _map(self, func, items):
+        """runs func for each item in items, calling _add_failure() for each
+        error. It assumes that func returns 0 when successful.
+ returns a two element tuple with (success_count, total_count)"""
+ success_count = 0
+ total_count = len(items)
+ name = func.__name__
+ for item in items:
+ result = func(item)
+ if result == SUCCESS:
+ # success!
+ success_count += 1
+ else:
+ # func failed...
+ message = 'failure: %s(%s)' % (name, item)
+ self._add_failure(item, message)
+ return (success_count, total_count)
+
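+    # Usage sketch (hypothetical locales): self._map(self.repack_locale,
+    # ["de", "it"]) runs repack_locale() once per locale and returns
+    # (success_count, total_count), e.g. (1, 2) if only one repacked cleanly.
+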
+ def _add_failure(self, locale, message, **kwargs):
+ """marks current step as failed"""
+ self.locales_property[locale] = FAILURE_STR
+ prop_key = "%s_failure" % locale
+ prop_value = self.query_buildbot_property(prop_key)
+ if prop_value:
+ prop_value = "%s %s" % (prop_value, message)
+ else:
+ prop_value = message
+ self.set_buildbot_property(prop_key, prop_value, write_to_file=True)
+ BaseScript.add_failure(self, locale, message=message, **kwargs)
+
+ def query_failed_locales(self):
+ return [l for l, res in self.locales_property.items() if
+ res == FAILURE_STR]
+
+ def summary(self):
+ """generates a summary"""
+ BaseScript.summary(self)
+ # TODO we probably want to make this configurable on/off
+ locales = self.query_locales()
+ for locale in locales:
+ self.locales_property.setdefault(locale, SUCCESS_STR)
+ self.set_buildbot_property("locales",
+ json.dumps(self.locales_property),
+ write_to_file=True)
+
+ # Actions {{{2
+ def clobber(self):
+ """clobber"""
+ dirs = self.query_abs_dirs()
+ clobber_dirs = (dirs['abs_objdir'], dirs['abs_upload_dir'])
+ PurgeMixin.clobber(self, always_clobber_dirs=clobber_dirs)
+
+ def pull(self):
+ """pulls source code"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ repos = []
+ # replace dictionary for repos
+ # we need to interpolate some values:
+ # branch, branch_repo
+ # and user_repo_override if exists
+ replace_dict = {}
+ if config.get("user_repo_override"):
+ replace_dict['user_repo_override'] = config['user_repo_override']
+ # this is OK so early because we get it from buildbot, or
+ # the command line for local dev
+ replace_dict['revision'] = self._query_revision()
+
+ for repository in config['repos']:
+ current_repo = {}
+ for key, value in repository.iteritems():
+ try:
+ current_repo[key] = value % replace_dict
+ except TypeError:
+ # pass through non-interpolables, like booleans
+ current_repo[key] = value
+ except KeyError:
+ self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
+ raise
+ repos.append(current_repo)
+ self.info("repositories: %s" % repos)
+ self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
+ tag_override=config.get('tag_override'))
+
+ def clone_locales(self):
+ self.pull_locale_source()
+
+ def setup(self):
+ """setup step"""
+ dirs = self.query_abs_dirs()
+ self._run_tooltool()
+ self._copy_mozconfig()
+ self._mach_configure()
+ self._run_make_in_config_dir()
+ self.make_wget_en_US()
+ self.make_unpack_en_US()
+ self.download_mar_tools()
+
+ # on try we want the source we already have, otherwise update to the
+ # same as the en-US binary
+ if self.config.get("update_gecko_source_to_enUS", True):
+ revision = self._query_enUS_revision()
+ # TODO do this through VCSMixin instead of hardcoding hg
+ # self.update(dest=dirs["abs_mozilla_dir"], revision=revision)
+ hg = self.query_exe("hg")
+ self.run_command([hg, "update", "-r", revision],
+ cwd=dirs["abs_mozilla_dir"],
+ env=self.query_bootstrap_env(),
+ error_list=BaseErrorList,
+ halt_on_failure=True, fatal_exit_code=3)
+ # if checkout updates CLOBBER file with a newer timestamp,
+ # next make -f client.mk configure will delete archives
+ # downloaded with make wget_en_US, so just touch CLOBBER file
+ _clobber_file = self._clobber_file()
+ if os.path.exists(_clobber_file):
+ self._touch_file(_clobber_file)
+        # and configure again: thanks to the last hg update, we may now be on
+        # a different firefox 'version' than the one on default
+ self._mach_configure()
+ self._run_make_in_config_dir()
+
+ def _run_make_in_config_dir(self):
+        """this step creates nsinstall, needed by make_wget_en_US()
+ """
+ dirs = self.query_abs_dirs()
+ config_dir = os.path.join(dirs['abs_objdir'], 'config')
+ env = self.query_bootstrap_env()
+ return self._make(target=['export'], cwd=config_dir, env=env)
+
+ def _clobber_file(self):
+ """returns the full path of the clobber file"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs['abs_objdir'], config.get('clobber_file'))
+
+ def _copy_mozconfig(self):
+ """copies the mozconfig file into abs_mozilla_dir/.mozconfig
+ and logs the content
+ """
+ config = self.config
+ dirs = self.query_abs_dirs()
+ mozconfig = config['mozconfig']
+ src = os.path.join(dirs['abs_work_dir'], mozconfig)
+ dst = os.path.join(dirs['abs_mozilla_dir'], '.mozconfig')
+ self.copyfile(src, dst)
+ self.read_from_file(dst, verbose=True)
+
+ def _mach(self, target, env, halt_on_failure=True, output_parser=None):
+ dirs = self.query_abs_dirs()
+ mach = self._get_mach_executable()
+        return self.run_command(mach + target,
+                                halt_on_failure=halt_on_failure,
+                                env=env,
+                                cwd=dirs['abs_mozilla_dir'],
+                                output_parser=output_parser)
+
+ def _mach_configure(self):
+ """calls mach configure"""
+ env = self.query_bootstrap_env()
+ target = ["configure"]
+ return self._mach(target=target, env=env)
+
+ def _get_mach_executable(self):
+ python = self.query_exe('python2.7')
+ return [python, 'mach']
+
+ def _get_make_executable(self):
+ config = self.config
+ dirs = self.query_abs_dirs()
+ if config.get('enable_mozmake'): # e.g. windows
+ make = r"/".join([dirs['abs_mozilla_dir'], 'mozmake.exe'])
+ # mysterious subprocess errors, let's try to fix this path...
+ make = make.replace('\\', '/')
+ make = [make]
+ else:
+ make = ['make']
+ return make
+
+ def _make(self, target, cwd, env, error_list=MakefileErrorList,
+ halt_on_failure=True, output_parser=None):
+ """Runs make. Returns the exit code"""
+ make = self._get_make_executable()
+ if target:
+ make = make + target
+ return self.run_command(make,
+ cwd=cwd,
+ env=env,
+ error_list=error_list,
+ halt_on_failure=halt_on_failure,
+ output_parser=output_parser)
+
+ def _get_output_from_make(self, target, cwd, env, halt_on_failure=True, ignore_errors=False):
+ """runs make and returns the output of the command"""
+ make = self._get_make_executable()
+ return self.get_output_from_command(make + target,
+ cwd=cwd,
+ env=env,
+ silent=True,
+ halt_on_failure=halt_on_failure,
+ ignore_errors=ignore_errors)
+
+ def make_unpack_en_US(self):
+ """wrapper for make unpack"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ env = self.query_bootstrap_env()
+ cwd = os.path.join(dirs['abs_objdir'], config['locales_dir'])
+ return self._make(target=["unpack"], cwd=cwd, env=env)
+
+ def make_wget_en_US(self):
+ """wrapper for make wget-en-US"""
+ env = self.query_bootstrap_env()
+ dirs = self.query_abs_dirs()
+ cwd = dirs['abs_locales_dir']
+ return self._make(target=["wget-en-US"], cwd=cwd, env=env)
+
+ def make_upload(self, locale):
+ """wrapper for make upload command"""
+ config = self.config
+ env = self.query_l10n_env()
+ dirs = self.query_abs_dirs()
+ buildid = self._query_buildid()
+ replace_dict = {
+ 'buildid': buildid,
+ 'branch': config['branch']
+ }
+ try:
+ env['POST_UPLOAD_CMD'] = config['base_post_upload_cmd'] % replace_dict
+ except KeyError:
+ # no base_post_upload_cmd in configuration, just skip it
+ pass
+ target = ['upload', 'AB_CD=%s' % (locale)]
+ cwd = dirs['abs_locales_dir']
+ parser = MakeUploadOutputParser(config=self.config,
+ log_obj=self.log_obj)
+ retval = self._make(target=target, cwd=cwd, env=env,
+ halt_on_failure=False, output_parser=parser)
+ if locale not in self.package_urls:
+ self.package_urls[locale] = {}
+ self.package_urls[locale].update(parser.matches)
+ if retval == SUCCESS:
+ self.info('Upload successful (%s)' % locale)
+ ret = SUCCESS
+ else:
+ self.error('failed to upload %s' % locale)
+ ret = FAILURE
+ return ret
+
+ def set_upload_files(self, locale):
+ # The tree doesn't have a good way of exporting the list of files
+ # created during locale generation, but we can grab them by echoing the
+ # UPLOAD_FILES variable for each locale.
+ env = self.query_l10n_env()
+ target = ['echo-variable-UPLOAD_FILES', 'echo-variable-CHECKSUM_FILES',
+ 'AB_CD=%s' % locale]
+ dirs = self.query_abs_dirs()
+ cwd = dirs['abs_locales_dir']
+ # Bug 1242771 - echo-variable-UPLOAD_FILES via mozharness fails when stderr is found
+ # we should ignore stderr as unfortunately it's expected when parsing for values
+ output = self._get_output_from_make(target=target, cwd=cwd, env=env,
+ ignore_errors=True)
+ self.info('UPLOAD_FILES is "%s"' % output)
+ files = shlex.split(output)
+ if not files:
+ self.error('failed to get upload file list for locale %s' % locale)
+ return FAILURE
+
+ self.upload_files[locale] = [
+ os.path.abspath(os.path.join(cwd, f)) for f in files
+ ]
+ return SUCCESS
+
+ def make_installers(self, locale):
+ """wrapper for make installers-(locale)"""
+ env = self.query_l10n_env()
+ self._copy_mozconfig()
+ dirs = self.query_abs_dirs()
+ cwd = os.path.join(dirs['abs_locales_dir'])
+ target = ["installers-%s" % locale,
+ "LOCALE_MERGEDIR=%s" % env["LOCALE_MERGEDIR"], ]
+ return self._make(target=target, cwd=cwd,
+ env=env, halt_on_failure=False)
+
+ def repack_locale(self, locale):
+ """wraps the logic for compare locale, make installers and generating
+ complete updates."""
+
+ if self.run_compare_locales(locale) != SUCCESS:
+ self.error("compare locale %s failed" % (locale))
+ return FAILURE
+
+ # compare locale succeeded, run make installers
+ if self.make_installers(locale) != SUCCESS:
+ self.error("make installers-%s failed" % (locale))
+ return FAILURE
+
+ # now try to upload the artifacts
+        if self.make_upload(locale) != SUCCESS:
+ self.error("make upload for locale %s failed!" % (locale))
+ return FAILURE
+
+ # set_upload_files() should be called after make upload, to make sure
+ # we have all files in place (checksums, etc)
+        if self.set_upload_files(locale) != SUCCESS:
+ self.error("failed to get list of files to upload for locale %s" % locale)
+ return FAILURE
+
+ return SUCCESS
+
+ def repack(self):
+        """creates the repacks and updates"""
+ self._map(self.repack_locale, self.query_locales())
+
+ def _query_objdir(self):
+ """returns objdir name from configuration"""
+ return self.config['objdir']
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(DesktopSingleLocale, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_tools_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tools')
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def submit_to_balrog(self):
+ """submit to balrog"""
+ if not self.config.get("balrog_servers"):
+ self.info("balrog_servers not set; skipping balrog submission.")
+ return
+ self.info("Reading buildbot build properties...")
+ self.read_buildbot_config()
+ # get platform, appName and hashType from configuration
+ # common values across different locales
+ config = self.config
+ platform = config["platform"]
+ hashType = config['hashType']
+ appName = config['appName']
+ branch = config['branch']
+ # values from configuration
+ self.set_buildbot_property("branch", branch)
+ self.set_buildbot_property("appName", appName)
+ # it's hardcoded to sha512 in balrog.py
+ self.set_buildbot_property("hashType", hashType)
+ self.set_buildbot_property("platform", platform)
+ # values common to the current repacks
+ self.set_buildbot_property("buildid", self._query_buildid())
+ self.set_buildbot_property("appVersion", self.query_version())
+
+ # submit complete mar to balrog
+ # clean up buildbot_properties
+ self._map(self.submit_repack_to_balrog, self.query_locales())
+
+ def submit_repack_to_balrog(self, locale):
+ """submit a single locale to balrog"""
+ # check if locale has been uploaded, if not just return a FAILURE
+ if locale not in self.package_urls:
+ self.error("%s is not present in package_urls. Did you run make upload?" % locale)
+ return FAILURE
+
+ if not self.query_is_nightly():
+ # remove this check when we extend this script to non-nightly builds
+ self.fatal("Not a nightly build")
+ return FAILURE
+
+ # complete mar file
+ c_marfile = self._query_complete_mar_filename(locale)
+ c_mar_url = self._query_complete_mar_url(locale)
+
+ # Set other necessary properties for Balrog submission. None need to
+ # be passed back to buildbot, so we won't write them to the properties
+ # files
+ # Locale is hardcoded to en-US, for silly reasons
+ # The Balrog submitter translates this platform into a build target
+ # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
+ self.set_buildbot_property("completeMarSize", self.query_filesize(c_marfile))
+ self.set_buildbot_property("completeMarHash", self.query_sha512sum(c_marfile))
+ self.set_buildbot_property("completeMarUrl", c_mar_url)
+ self.set_buildbot_property("locale", locale)
+ if "partialInfo" in self.package_urls[locale]:
+ self.set_buildbot_property("partialInfo",
+ self.package_urls[locale]["partialInfo"])
+ ret = FAILURE
+ try:
+ result = self.submit_balrog_updates()
+ self.info("balrog return code: %s" % (result))
+ if result == 0:
+ ret = SUCCESS
+ except Exception as error:
+ self.error("submit repack to balrog failed: %s" % (str(error)))
+ return ret
+
+ def _query_complete_mar_filename(self, locale):
+ """returns the full path to a localized complete mar file"""
+ config = self.config
+ version = self.query_version()
+ complete_mar_name = config['localized_mar'] % {'version': version,
+ 'locale': locale}
+ return os.path.join(self._update_mar_dir(), complete_mar_name)
+
+ def _query_complete_mar_url(self, locale):
+ """returns the complete mar url taken from self.package_urls[locale]
+ this value is available only after make_upload"""
+ if "complete_mar_url" in self.config:
+ return self.config["complete_mar_url"]
+ if "completeMarUrl" in self.package_urls[locale]:
+ return self.package_urls[locale]["completeMarUrl"]
+ # url = self.config.get("update", {}).get("mar_base_url")
+ # if url:
+ # url += os.path.basename(self.query_marfile_path())
+ # return url.format(branch=self.query_branch())
+ self.fatal("Couldn't find complete mar url in config or package_urls")
+
+ def _update_mar_dir(self):
+ """returns the full path of the update/ directory"""
+ return self._mar_dir('update_mar_dir')
+
+ def _mar_binaries(self):
+ """returns a tuple with mar and mbsdiff paths"""
+ config = self.config
+ return (config['mar'], config['mbsdiff'])
+
+ def _mar_dir(self, dirname):
+ """returns the full path of dirname;
+ dirname is an entry in configuration"""
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs['abs_objdir'], self.config[dirname])
+
+ # TODO: replace with ToolToolMixin
+ def _get_tooltool_auth_file(self):
+ # set the default authentication file based on platform; this
+ # corresponds to where puppet puts the token
+ if 'tooltool_authentication_file' in self.config:
+ fn = self.config['tooltool_authentication_file']
+ elif self._is_windows():
+ fn = r'c:\builds\relengapi.tok'
+ else:
+ fn = '/builds/relengapi.tok'
+
+ # if the file doesn't exist, don't pass it to tooltool (it will just
+ # fail). In taskcluster, this will work OK as the relengapi-proxy will
+ # take care of auth. Everywhere else, we'll get auth failures if
+ # necessary.
+ if os.path.exists(fn):
+ return fn
+
+ def _run_tooltool(self):
+ config = self.config
+ dirs = self.query_abs_dirs()
+ if not config.get('tooltool_manifest_src'):
+ return self.warning(ERROR_MSGS['tooltool_manifest_undetermined'])
+ fetch_script_path = os.path.join(dirs['abs_tools_dir'],
+ 'scripts/tooltool/tooltool_wrapper.sh')
+ tooltool_manifest_path = os.path.join(dirs['abs_mozilla_dir'],
+ config['tooltool_manifest_src'])
+ cmd = [
+ 'sh',
+ fetch_script_path,
+ tooltool_manifest_path,
+ config['tooltool_url'],
+ config['tooltool_bootstrap'],
+ ]
+ cmd.extend(config['tooltool_script'])
+ auth_file = self._get_tooltool_auth_file()
+ if auth_file and os.path.exists(auth_file):
+ cmd.extend(['--authentication-file', auth_file])
+ cache = config['bootstrap_env'].get('TOOLTOOL_CACHE')
+ if cache:
+ cmd.extend(['-c', cache])
+ self.info(str(cmd))
+ self.run_command(cmd, cwd=dirs['abs_mozilla_dir'], halt_on_failure=True)
+
+ def funsize_props(self):
+        """Set buildbot properties required to trigger funsize tasks
+        responsible for generating partial updates for successfully generated locales"""
+ locales = self.query_locales()
+ funsize_info = {
+ 'locales': locales,
+ 'branch': self.config['branch'],
+ 'appName': self.config['appName'],
+ 'platform': self.config['platform'],
+ 'completeMarUrls': {locale: self._query_complete_mar_url(locale) for locale in locales},
+ }
+ self.info('funsize info: %s' % funsize_info)
+ self.set_buildbot_property('funsize_info', json.dumps(funsize_info),
+ write_to_file=True)
+
+ def taskcluster_upload(self):
+ auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
+ credentials = {}
+ execfile(auth, credentials)
+ client_id = credentials.get('taskcluster_clientId')
+ access_token = credentials.get('taskcluster_accessToken')
+ if not client_id or not access_token:
+ self.warning('Skipping S3 file upload: No taskcluster credentials.')
+ return
+
+ # We need to activate the virtualenv so that we can import taskcluster
+ # (and its dependent modules, like requests and hawk). Normally we
+        # could create the virtualenv as an action, but due to some odd
+        # dependencies with query_build_env() being called from build(),
+        # the virtualenv has to be created and activated here instead.
+ self.disable_mock()
+ self.create_virtualenv()
+ self.enable_mock()
+ self.activate_virtualenv()
+
+ branch = self.config['branch']
+ revision = self._query_revision()
+ repo = self.query_l10n_repo()
+ if not repo:
+ self.fatal("Unable to determine repository for querying the push info.")
+ pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hg')
+ pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))
+
+ routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
+ 'testing/mozharness/configs/routes.json')
+ with open(routes_json) as f:
+ contents = json.load(f)
+ templates = contents['l10n']
+
+ # Release promotion creates a special task to accumulate all artifacts
+ # under the same task
+ artifacts_task = None
+ self.read_buildbot_config()
+ if "artifactsTaskId" in self.buildbot_config.get("properties", {}):
+ artifacts_task_id = self.buildbot_config["properties"]["artifactsTaskId"]
+ artifacts_tc = Taskcluster(
+ branch=branch, rank=pushinfo.pushdate, client_id=client_id,
+ access_token=access_token, log_obj=self.log_obj,
+ task_id=artifacts_task_id)
+ artifacts_task = artifacts_tc.get_task(artifacts_task_id)
+ artifacts_tc.claim_task(artifacts_task)
+
+ for locale, files in self.upload_files.iteritems():
+ self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
+ routes = []
+ for template in templates:
+ fmt = {
+ 'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
+ 'project': branch,
+ 'head_rev': revision,
+ 'pushdate': pushdate,
+ 'year': pushdate[0:4],
+ 'month': pushdate[4:6],
+ 'day': pushdate[6:8],
+ 'build_product': self.config['stage_product'],
+ 'build_name': self.query_build_name(),
+ 'build_type': self.query_build_type(),
+ 'locale': locale,
+ }
+ fmt.update(self.buildid_to_dict(self._query_buildid()))
+ routes.append(template.format(**fmt))
+
+ self.info('Using routes: %s' % routes)
+ tc = Taskcluster(branch,
+ pushinfo.pushdate, # Use pushdate as the rank
+ client_id,
+ access_token,
+ self.log_obj,
+ )
+ task = tc.create_task(routes)
+ tc.claim_task(task)
+
+ for upload_file in files:
+ # Create an S3 artifact for each file that gets uploaded. We also
+ # check the uploaded file against the property conditions so that we
+ # can set the buildbot config with the correct URLs for package
+ # locations.
+ artifact_url = tc.create_artifact(task, upload_file)
+ if artifacts_task:
+ artifacts_tc.create_reference_artifact(
+ artifacts_task, upload_file, artifact_url)
+
+ tc.report_completed(task)
+
+ if artifacts_task:
+ if not self.query_failed_locales():
+ artifacts_tc.report_completed(artifacts_task)
+ else:
+ # If some locales fail, we want to mark the artifacts
+ # task failed, so a retry can reuse the same task ID
+ artifacts_tc.report_failed(artifacts_task)
+
+
+# main {{{
+if __name__ == '__main__':
+ single_locale = DesktopSingleLocale()
+ single_locale.run_and_exit()
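+
+# A typical local invocation might look like this (hypothetical config name;
+# --config-file is the standard mozharness option for selecting configs):
+#
+#   python scripts/desktop_l10n.py --config-file single_locale/linux64.py \
+#       --locale de --this-chunk 1 --total-chunks 1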
diff --git a/testing/mozharness/scripts/desktop_partner_repacks.py b/testing/mozharness/scripts/desktop_partner_repacks.py
new file mode 100755
index 000000000..ff07dffc8
--- /dev/null
+++ b/testing/mozharness/scripts/desktop_partner_repacks.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""desktop_partner_repacks.py
+
+This script manages Desktop partner repacks for beta/release builds.
+"""
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.base.python import VirtualenvMixin
+from mozharness.base.log import FATAL
+
+
+# DesktopPartnerRepacks {{{1
+class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
+ BaseScript, VirtualenvMixin):
+ """Manages desktop partner repacks"""
+ actions = [
+ "clobber",
+ "create-virtualenv",
+ "activate-virtualenv",
+ "setup",
+ "repack",
+ "summary",
+ ]
+ config_options = [
+ [["--version", "-v"], {
+ "dest": "version",
+ "help": "Version of Firefox to repack",
+ }],
+ [["--build-number", "-n"], {
+ "dest": "build_number",
+ "help": "Build number of Firefox to repack",
+ }],
+ [["--platform"], {
+ "dest": "platform",
+ "help": "Platform to repack (e.g. linux64, macosx64, ...)",
+ }],
+ [["--partner", "-p"], {
+ "dest": "partner",
+ "help": "Limit repackaging to partners matching this string",
+ }],
+ [["--s3cfg"], {
+ "dest": "s3cfg",
+ "help": "Configuration file for uploading to S3 using s3cfg",
+ }],
+ [["--hgroot"], {
+ "dest": "hgroot",
+ "help": "Use a different hg server for retrieving files",
+ }],
+ [["--hgrepo"], {
+ "dest": "hgrepo",
+ "help": "Use a different base repo for retrieving files",
+ }],
+ [["--require-buildprops"], {
+ "action": "store_true",
+ "dest": "require_buildprops",
+ "default": False,
+ "help": "Read in config options (like partner) from the buildbot properties file."
+ }],
+ ]
+
+ def __init__(self):
+ # fxbuild style:
+ buildscript_kwargs = {
+ 'all_actions': DesktopPartnerRepacks.actions,
+ 'default_actions': DesktopPartnerRepacks.actions,
+ 'config': {
+ 'buildbot_json_path': 'buildprops.json',
+ "log_name": "partner-repacks",
+ "hashType": "sha512",
+ 'virtualenv_modules': [
+ 'requests==2.2.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.15',
+ 's3cmd==1.6.0',
+ ],
+ 'virtualenv_path': 'venv',
+ 'workdir': 'partner-repacks',
+ },
+ }
+ #
+
+ BaseScript.__init__(
+ self,
+ config_options=self.config_options,
+ **buildscript_kwargs
+ )
+
+
+ def _pre_config_lock(self, rw_config):
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ else:
+ if self.config.get('require_buildprops', False) is True:
+ if not self.buildbot_config:
+ self.fatal("Unable to load properties from file: %s" % self.config.get('buildbot_json_path'))
+ props = self.buildbot_config["properties"]
+ for prop in ['version', 'build_number', 'revision', 'repo_file', 'repack_manifests_url', 'partner']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ if 'version' not in self.config:
+ self.fatal("Version (-v) not supplied.")
+ if 'build_number' not in self.config:
+ self.fatal("Build number (-n) not supplied.")
+ if 'repo_file' not in self.config:
+ self.fatal("repo_file not supplied.")
+ if 'repack_manifests_url' not in self.config:
+ self.fatal("repack_manifests_url not supplied.")
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(DesktopPartnerRepacks, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_repo_dir'] = os.path.join(abs_dirs['abs_work_dir'], '.repo')
+ dirs['abs_partners_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'partners')
+ dirs['abs_scripts_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'scripts')
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ # Actions {{{
+ def _repo_cleanup(self):
+ self.rmtree(self.query_abs_dirs()['abs_repo_dir'])
+ self.rmtree(self.query_abs_dirs()['abs_partners_dir'])
+ self.rmtree(self.query_abs_dirs()['abs_scripts_dir'])
+
+ def _repo_init(self, repo):
+ status = self.run_command([repo, "init", "--no-repo-verify",
+ "-u", self.config['repack_manifests_url']],
+ cwd=self.query_abs_dirs()['abs_work_dir'])
+ if status:
+ return status
+ return self.run_command([repo, "sync"],
+ cwd=self.query_abs_dirs()['abs_work_dir'])
+
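+    # This follows the standard google-repo bootstrap: "repo init -u
+    # <manifest-url>" followed by "repo sync" to check out everything the
+    # manifest references.
+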
+ def setup(self):
+ """setup step"""
+ repo = self.download_file(self.config['repo_file'],
+ file_name='repo',
+ parent_dir=self.query_abs_dirs()['abs_work_dir'],
+ error_level=FATAL)
+ if not os.path.exists(repo):
+ self.fatal("Unable to download repo tool.")
+ self.chmod(repo, 0755)
+ self.retry(self._repo_init,
+ args=(repo,),
+ error_level=FATAL,
+                   cleanup=self._repo_cleanup,
+ good_statuses=[0],
+ sleeptime=5)
+
+ def repack(self):
+ """creates the repacks"""
+ python = self.query_exe("python2.7")
+ repack_cmd = [python, "partner-repacks.py",
+ "-v", self.config['version'],
+ "-n", str(self.config['build_number'])]
+ if self.config.get('platform'):
+ repack_cmd.extend(["--platform", self.config['platform']])
+ if self.config.get('partner'):
+ repack_cmd.extend(["--partner", self.config['partner']])
+ if self.config.get('s3cfg'):
+ repack_cmd.extend(["--s3cfg", self.config['s3cfg']])
+ if self.config.get('hgroot'):
+ repack_cmd.extend(["--hgroot", self.config['hgroot']])
+ if self.config.get('hgrepo'):
+ repack_cmd.extend(["--repo", self.config['hgrepo']])
+ if self.config.get('revision'):
+ repack_cmd.extend(["--tag", self.config["revision"]])
+
+ return self.run_command(repack_cmd,
+ cwd=self.query_abs_dirs()['abs_scripts_dir'])
+
+# main {{{
+if __name__ == '__main__':
+ partner_repacks = DesktopPartnerRepacks()
+ partner_repacks.run_and_exit()
diff --git a/testing/mozharness/scripts/desktop_unittest.py b/testing/mozharness/scripts/desktop_unittest.py
new file mode 100755
index 000000000..b2e754567
--- /dev/null
+++ b/testing/mozharness/scripts/desktop_unittest.py
@@ -0,0 +1,742 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""desktop_unittest.py
+The goal of this is to extract desktop unittesting from buildbot's factory.py
+
+author: Jordan Lund
+"""
+
+import os
+import re
+import sys
+import copy
+import shutil
+import glob
+import imp
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import BaseErrorList
+from mozharness.base.log import INFO, ERROR
+from mozharness.base.script import PreScriptAction
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.buildbot import TBPL_EXCEPTION
+from mozharness.mozilla.mozbase import MozbaseMixin
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.mozilla.testing.errors import HarnessErrorList
+from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
+from mozharness.mozilla.testing.codecoverage import (
+ CodeCoverageMixin,
+ code_coverage_config_options
+)
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+
+SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell', 'mozbase', 'mozmill']
+SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
+
+
+# DesktopUnittest {{{1
+class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin, CodeCoverageMixin):
+ config_options = [
+ [['--mochitest-suite', ], {
+ "action": "extend",
+ "dest": "specified_mochitest_suites",
+ "type": "string",
+ "help": "Specify which mochi suite to run. "
+ "Suites are defined in the config file.\n"
+ "Examples: 'all', 'plain1', 'plain5', 'chrome', or 'a11y'"}
+ ],
+ [['--reftest-suite', ], {
+ "action": "extend",
+ "dest": "specified_reftest_suites",
+ "type": "string",
+ "help": "Specify which reftest suite to run. "
+ "Suites are defined in the config file.\n"
+                 "Examples: 'all', 'crashtest', or 'jsreftest'"}
+ ],
+ [['--xpcshell-suite', ], {
+ "action": "extend",
+ "dest": "specified_xpcshell_suites",
+ "type": "string",
+ "help": "Specify which xpcshell suite to run. "
+                 "Suites are defined in the config file.\n"
+ "Examples: 'xpcshell'"}
+ ],
+ [['--cppunittest-suite', ], {
+ "action": "extend",
+ "dest": "specified_cppunittest_suites",
+ "type": "string",
+ "help": "Specify which cpp unittest suite to run. "
+                 "Suites are defined in the config file.\n"
+ "Examples: 'cppunittest'"}
+ ],
+ [['--gtest-suite', ], {
+ "action": "extend",
+ "dest": "specified_gtest_suites",
+ "type": "string",
+ "help": "Specify which gtest suite to run. "
+                 "Suites are defined in the config file.\n"
+ "Examples: 'gtest'"}
+ ],
+ [['--jittest-suite', ], {
+ "action": "extend",
+ "dest": "specified_jittest_suites",
+ "type": "string",
+ "help": "Specify which jit-test suite to run. "
+                 "Suites are defined in the config file.\n"
+ "Examples: 'jittest'"}
+ ],
+ [['--mozbase-suite', ], {
+ "action": "extend",
+ "dest": "specified_mozbase_suites",
+ "type": "string",
+ "help": "Specify which mozbase suite to run. "
+                 "Suites are defined in the config file.\n"
+ "Examples: 'mozbase'"}
+ ],
+ [['--mozmill-suite', ], {
+ "action": "extend",
+ "dest": "specified_mozmill_suites",
+ "type": "string",
+ "help": "Specify which mozmill suite to run. "
+                 "Suites are defined in the config file.\n"
+ "Examples: 'mozmill'"}
+ ],
+ [['--run-all-suites', ], {
+ "action": "store_true",
+ "dest": "run_all_suites",
+ "default": False,
+ "help": "This will run all suites that are specified "
+ "in the config file. You do not need to specify "
+ "any other suites.\nBeware, this may take a while ;)"}
+ ],
+ [['--e10s', ], {
+ "action": "store_true",
+ "dest": "e10s",
+ "default": False,
+ "help": "Run tests with multiple processes."}
+ ],
+ [['--strict-content-sandbox', ], {
+ "action": "store_true",
+ "dest": "strict_content_sandbox",
+ "default": False,
+ "help": "Run tests with a more strict content sandbox (Windows only)."}
+ ],
+ [['--no-random', ], {
+ "action": "store_true",
+ "dest": "no_random",
+ "default": False,
+ "help": "Run tests with no random intermittents and bisect in case of real failure."}
+ ],
+ [["--total-chunks"], {
+ "action": "store",
+ "dest": "total_chunks",
+ "help": "Number of total chunks"}
+ ],
+ [["--this-chunk"], {
+ "action": "store",
+ "dest": "this_chunk",
+ "help": "Number of this chunk"}
+ ],
+ [["--allow-software-gl-layers"], {
+ "action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
+ ],
+ ] + copy.deepcopy(testing_config_options) + \
+ copy.deepcopy(blobupload_config_options) + \
+ copy.deepcopy(code_coverage_config_options)
+
+ def __init__(self, require_config_file=True):
+ # abs_dirs defined already in BaseScript but is here to make pylint happy
+ self.abs_dirs = None
+ super(DesktopUnittest, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'stage-files',
+ 'run-tests',
+ ],
+ require_config_file=require_config_file,
+ config={'require_test_zip': True})
+
+ c = self.config
+ self.global_test_options = []
+ self.installer_url = c.get('installer_url')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+ self.symbols_url = c.get('symbols_url')
+        # this is set so mozinstall in install() doesn't fail if we don't run
+        # the download-and-extract action
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.abs_app_dir = None
+ self.abs_res_dir = None
+
+ # Construct an identifier to be used to identify Perfherder data
+ # for resource monitoring recording. This attempts to uniquely
+ # identify this test invocation configuration.
+ perfherder_parts = []
+ perfherder_options = []
+ suites = (
+ ('specified_mochitest_suites', 'mochitest'),
+ ('specified_reftest_suites', 'reftest'),
+ ('specified_xpcshell_suites', 'xpcshell'),
+ ('specified_cppunittest_suites', 'cppunit'),
+ ('specified_gtest_suites', 'gtest'),
+ ('specified_jittest_suites', 'jittest'),
+ ('specified_mozbase_suites', 'mozbase'),
+ ('specified_mozmill_suites', 'mozmill'),
+ )
+ for s, prefix in suites:
+ if s in c:
+ perfherder_parts.append(prefix)
+ perfherder_parts.extend(c[s])
+
+ if 'this_chunk' in c:
+ perfherder_parts.append(c['this_chunk'])
+
+ if c['e10s']:
+ perfherder_options.append('e10s')
+
+ self.resource_monitor_perfherder_id = ('.'.join(perfherder_parts),
+ perfherder_options)
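+        # e.g. a mochitest 'plain1' run of chunk 2 with e10s enabled would
+        # yield ('mochitest.plain1.2', ['e10s']) (illustrative values, not
+        # taken from any real config).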
+
+ # helper methods {{{2
+ def _pre_config_lock(self, rw_config):
+ super(DesktopUnittest, self)._pre_config_lock(rw_config)
+ c = self.config
+ if not c.get('run_all_suites'):
+ return # configs are valid
+ for category in SUITE_CATEGORIES:
+ specific_suites = c.get('specified_%s_suites' % (category))
+ if specific_suites:
+ if specific_suites != 'all':
+                    self.fatal("Config options are not valid. When the "
+                               "'--run-all-suites' flag is enabled, do not "
+                               "also specify individual suites like:\n"
+                               " '--mochitest-suite browser-chrome'")
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(DesktopUnittest, self).query_abs_dirs()
+
+ c = self.config
+ dirs = {}
+ dirs['abs_app_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'application')
+ dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
+ dirs['abs_test_extensions_dir'] = os.path.join(dirs['abs_test_install_dir'], 'extensions')
+ dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')
+ dirs['abs_test_bin_plugins_dir'] = os.path.join(dirs['abs_test_bin_dir'],
+ 'plugins')
+ dirs['abs_test_bin_components_dir'] = os.path.join(dirs['abs_test_bin_dir'],
+ 'components')
+ dirs['abs_mochitest_dir'] = os.path.join(dirs['abs_test_install_dir'], "mochitest")
+ dirs['abs_reftest_dir'] = os.path.join(dirs['abs_test_install_dir'], "reftest")
+ dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
+ dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
+ dirs['abs_gtest_dir'] = os.path.join(dirs['abs_test_install_dir'], "gtest")
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+ dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "jit-test", "jit-test")
+ dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
+ dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")
+
+ if os.path.isabs(c['virtualenv_path']):
+ dirs['abs_virtualenv_dir'] = c['virtualenv_path']
+ else:
+ dirs['abs_virtualenv_dir'] = os.path.join(abs_dirs['abs_work_dir'],
+ c['virtualenv_path'])
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def query_abs_app_dir(self):
+ """We can't set this in advance, because OSX install directories
+ change depending on branding and opt/debug.
+ """
+ if self.abs_app_dir:
+ return self.abs_app_dir
+ if not self.binary_path:
+ self.fatal("Can't determine abs_app_dir (binary_path not set!)")
+ self.abs_app_dir = os.path.dirname(self.binary_path)
+ return self.abs_app_dir
+
+ def query_abs_res_dir(self):
+ """The directory containing resources like plugins and extensions. On
+        OSX this is Contents/Resources; on all other platforms it's the same as
+ the app dir.
+
+ As with the app dir, we can't set this in advance, because OSX install
+ directories change depending on branding and opt/debug.
+ """
+ if self.abs_res_dir:
+ return self.abs_res_dir
+
+ abs_app_dir = self.query_abs_app_dir()
+ if self._is_darwin():
+ res_subdir = self.config.get("mac_res_subdir", "Resources")
+ self.abs_res_dir = os.path.join(os.path.dirname(abs_app_dir), res_subdir)
+ else:
+ self.abs_res_dir = abs_app_dir
+ return self.abs_res_dir
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+
+ self.register_virtualenv_module(name='pip>=1.5')
+ self.register_virtualenv_module('psutil==3.1.1', method='pip')
+ self.register_virtualenv_module(name='mock')
+ self.register_virtualenv_module(name='simplejson')
+
+ requirements_files = [
+ os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'marionette_requirements.txt')]
+
+ if os.path.isdir(dirs['abs_mochitest_dir']):
+ # mochitest is the only thing that needs this
+ requirements_files.append(
+ os.path.join(dirs['abs_mochitest_dir'],
+ 'websocketprocessbridge',
+ 'websocketprocessbridge_requirements.txt'))
+
+ for requirements_file in requirements_files:
+ self.register_virtualenv_module(requirements=[requirements_file],
+ two_pass=True)
+
+ def _query_symbols_url(self):
+ """query the full symbols URL based upon binary URL"""
+ # may break with name convention changes but is one less 'input' for script
+ if self.symbols_url:
+ return self.symbols_url
+
+ symbols_url = None
+ self.info("finding symbols_url based upon self.installer_url")
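+        # Illustration of the rewrite below (made-up URL; real builds follow
+        # the same naming convention):
+        #   .../firefox-55.0.en-US.linux-x86_64.tar.bz2
+        #     -> .../firefox-55.0.en-US.linux-x86_64.crashreporter-symbols.zip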
+ if self.installer_url:
+ for ext in ['.zip', '.dmg', '.tar.bz2']:
+ if ext in self.installer_url:
+ symbols_url = self.installer_url.replace(
+ ext, '.crashreporter-symbols.zip')
+ if not symbols_url:
+                self.fatal("self.installer_url was found but symbols_url "
+                           "could not be determined")
+ else:
+ self.fatal("self.installer_url was not found in self.config")
+ self.info("setting symbols_url as %s" % (symbols_url))
+ self.symbols_url = symbols_url
+ return self.symbols_url
+
+ def _query_abs_base_cmd(self, suite_category, suite):
+ if self.binary_path:
+ c = self.config
+ dirs = self.query_abs_dirs()
+ run_file = c['run_file_names'][suite_category]
+ base_cmd = [self.query_python_path('python'), '-u']
+ base_cmd.append(os.path.join(dirs["abs_%s_dir" % suite_category], run_file))
+ abs_app_dir = self.query_abs_app_dir()
+ abs_res_dir = self.query_abs_res_dir()
+
+ raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_raw.log' % suite)
+
+ error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_errorsummary.log' % suite)
+ str_format_values = {
+ 'binary_path': self.binary_path,
+ 'symbols_path': self._query_symbols_url(),
+ 'abs_app_dir': abs_app_dir,
+ 'abs_res_dir': abs_res_dir,
+ 'raw_log_file': raw_log_file,
+ 'error_summary_file': error_summary_file,
+ 'gtest_dir': os.path.join(dirs['abs_test_install_dir'],
+ 'gtest'),
+ }
+
+ # TestingMixin._download_and_extract_symbols() will set
+ # self.symbols_path when downloading/extracting.
+ if self.symbols_path:
+ str_format_values['symbols_path'] = self.symbols_path
+
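+            # Suites listed in SUITE_DEFAULT_E10S (defined earlier in this
+            # script) run with e10s by default, so the two branches below
+            # toggle away from each category's default.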
+ if suite_category in SUITE_DEFAULT_E10S and not c['e10s']:
+ base_cmd.append('--disable-e10s')
+ elif suite_category not in SUITE_DEFAULT_E10S and c['e10s']:
+ base_cmd.append('--e10s')
+
+ if c.get('strict_content_sandbox'):
+ if suite_category == "mochitest":
+ base_cmd.append('--strict-content-sandbox')
+ else:
+ self.fatal("--strict-content-sandbox only works with mochitest suites.")
+
+ if c.get('total_chunks') and c.get('this_chunk'):
+ base_cmd.extend(['--total-chunks', c['total_chunks'],
+ '--this-chunk', c['this_chunk']])
+
+ if c['no_random']:
+ if suite_category == "mochitest":
+ base_cmd.append('--bisect-chunk=default')
+ else:
+ self.warning("--no-random does not currently work with suites other than mochitest.")
+
+ # set pluginsPath
+ abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
+ str_format_values['test_plugin_path'] = abs_res_plugins_dir
+
+ if suite_category not in c["suite_definitions"]:
+                self.fatal("'%s' not defined in the config!" % suite_category)
+
+ if suite in ('browser-chrome-coverage', 'xpcshell-coverage', 'mochitest-devtools-chrome-coverage'):
+ base_cmd.append('--jscov-dir-prefix=%s' %
+ dirs['abs_blob_upload_dir'])
+
+ options = c["suite_definitions"][suite_category]["options"]
+ if options:
+ for option in options:
+ option = option % str_format_values
+ if not option.endswith('None'):
+ base_cmd.append(option)
+ if self.structured_output(
+ suite_category,
+ self._query_try_flavor(suite_category, suite)
+ ):
+ base_cmd.append("--log-raw=-")
+ return base_cmd
+ else:
+ self.warning("Suite options for %s could not be determined."
+ "\nIf you meant to have options for this suite, "
+ "please make sure they are specified in your "
+ "config under %s_options" %
+ (suite_category, suite_category))
+
+ return base_cmd
+ else:
+ self.fatal("'binary_path' could not be determined.\n This should "
+ "be like '/path/build/application/firefox/firefox'"
+ "\nIf you are running this script without the 'install' "
+ "action (where binary_path is set), please ensure you are"
+ " either:\n(1) specifying it in the config file under "
+ "binary_path\n(2) specifying it on command line with the"
+ " '--binary-path' flag")
+
+ def _query_specified_suites(self, category):
+        # The logic: if at least one '--{category}-suite' was given, run only
+        # the given suite(s). If no suites were specified and the
+        # --run-all-suites flag was given, run all {category} suites.
+        # Otherwise, run no suites.
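+        # For example (hypothetical config values):
+        #   specified_mochitest_suites=['chrome']  -> only the 'chrome' suite
+        #   specified_mochitest_suites=['all']     -> all_mochitest_suites
+        #   nothing specified, --run-all-suites    -> all_mochitest_suites
+        #   nothing specified, no flag             -> None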
+ c = self.config
+ all_suites = c.get('all_%s_suites' % (category))
+ specified_suites = c.get('specified_%s_suites' % (category)) # list
+ suites = None
+
+ if specified_suites:
+ if 'all' in specified_suites:
+ # useful if you want a quick way of saying run all suites
+ # of a specific category.
+ suites = all_suites
+ else:
+ # suites gets a dict of everything from all_suites where a key
+ # is also in specified_suites
+ suites = dict((key, all_suites.get(key)) for key in
+ specified_suites if key in all_suites.keys())
+ else:
+            if c.get('run_all_suites'):  # needed if you don't specify any suites
+ suites = all_suites
+
+ return suites
+
+ def _query_try_flavor(self, category, suite):
+ flavors = {
+ "mochitest": [("plain.*", "mochitest"),
+ ("browser-chrome.*", "browser-chrome"),
+ ("mochitest-devtools-chrome.*", "devtools-chrome"),
+ ("chrome", "chrome"),
+ ("jetpack.*", "jetpack")],
+ "xpcshell": [("xpcshell", "xpcshell")],
+ "reftest": [("reftest", "reftest"),
+ ("crashtest", "crashtest")]
+ }
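+        # e.g. ('mochitest', 'browser-chrome-3') matches 'browser-chrome.*'
+        # and returns 'browser-chrome'; categories without an entry (e.g.
+        # 'gtest') fall through and return None.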
+ for suite_pattern, flavor in flavors.get(category, []):
+ if re.compile(suite_pattern).match(suite):
+ return flavor
+
+ def structured_output(self, suite_category, flavor=None):
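+        # Whether the harness for this category/flavor emits structured logs.
+        # Example with a hypothetical config of
+        #   unstructured_flavors = {'mochitest': ['plain'], 'xpcshell': None}:
+        #   ('reftest', ...)         -> True  (category not listed)
+        #   ('mochitest', 'plain')   -> False (flavor listed as unstructured)
+        #   ('mochitest', 'chrome')  -> True
+        #   ('xpcshell', 'xpcshell') -> False (empty entry covers all flavors)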
+ unstructured_flavors = self.config.get('unstructured_flavors')
+ if not unstructured_flavors:
+ return False
+ if suite_category not in unstructured_flavors:
+ return True
+ if not unstructured_flavors.get(suite_category) or flavor in unstructured_flavors.get(suite_category):
+ return False
+ return True
+
+ def get_test_output_parser(self, suite_category, flavor=None, strict=False,
+ **kwargs):
+ if not self.structured_output(suite_category, flavor):
+ return DesktopUnittestOutputParser(suite_category=suite_category, **kwargs)
+ self.info("Structured output parser in use for %s." % suite_category)
+ return StructuredOutputParser(suite_category=suite_category, strict=strict, **kwargs)
+
+ # Actions {{{2
+
+ # clobber defined in BaseScript, deletes mozharness/build if exists
+ # read_buildbot_config is in BuildbotMixin.
+ # postflight_read_buildbot_config is in TestingMixin.
+ # preflight_download_and_extract is in TestingMixin.
+ # create_virtualenv is in VirtualenvMixin.
+ # preflight_install is in TestingMixin.
+ # install is in TestingMixin.
+ # upload_blobber_files is in BlobUploadMixin
+
+ @PreScriptAction('download-and-extract')
+ def _pre_download_and_extract(self, action):
+ """Abort if --artifact try syntax is used with compiled-code tests"""
+ if not self.try_message_has_flag('artifact'):
+ return
+ self.info('Artifact build requested in try syntax.')
+ rejected = []
+ compiled_code_suites = [
+ "cppunit",
+ "gtest",
+ "jittest",
+ ]
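+        # e.g. an artifact try push that also requests a 'gtest' suite aborts
+        # here, since gtest needs compiled test binaries that artifact builds
+        # do not produce.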
+ for category in SUITE_CATEGORIES:
+ suites = self._query_specified_suites(category) or []
+ for suite in suites:
+ if any([suite.startswith(c) for c in compiled_code_suites]):
+ rejected.append(suite)
+ break
+ if rejected:
+ self.buildbot_status(TBPL_EXCEPTION)
+ self.fatal("There are specified suites that are incompatible with "
+ "--artifact try syntax flag: {}".format(', '.join(rejected)),
+ exit_code=self.return_code)
+
+
+ def download_and_extract(self):
+ """
+ download and extract test zip / download installer
+ optimizes which subfolders to extract from tests zip
+ """
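+        # e.g. with only xpcshell suites specified (and 'run-tests'
+        # scheduled), only 'minimum_tests_zip_dirs' plus the xpcshell entry
+        # of 'specific_tests_zip_dirs' are extracted from the tests zip.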
+ c = self.config
+
+ extract_dirs = None
+ if c['specific_tests_zip_dirs']:
+ extract_dirs = list(c['minimum_tests_zip_dirs'])
+ for category in c['specific_tests_zip_dirs'].keys():
+ if c['run_all_suites'] or self._query_specified_suites(category) \
+ or 'run-tests' not in self.actions:
+ extract_dirs.extend(c['specific_tests_zip_dirs'][category])
+
+ if c.get('run_all_suites'):
+ target_categories = SUITE_CATEGORIES
+ else:
+ target_categories = [cat for cat in SUITE_CATEGORIES
+ if self._query_specified_suites(cat) is not None]
+ super(DesktopUnittest, self).download_and_extract(extract_dirs=extract_dirs,
+ suite_categories=target_categories)
+
+ def stage_files(self):
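+        # Dispatch to an optional per-category helper: a specified 'xpcshell'
+        # suite calls self._stage_xpcshell below, while categories without a
+        # _stage_<category> helper need no extra staging.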
+ for category in SUITE_CATEGORIES:
+ suites = self._query_specified_suites(category)
+ stage = getattr(self, '_stage_{}'.format(category), None)
+ if suites and stage:
+ stage(suites)
+
+ def _stage_files(self, bin_name=None):
+ dirs = self.query_abs_dirs()
+ abs_app_dir = self.query_abs_app_dir()
+
+ # For mac these directories are in Contents/Resources, on other
+ # platforms abs_res_dir will point to abs_app_dir.
+ abs_res_dir = self.query_abs_res_dir()
+ abs_res_components_dir = os.path.join(abs_res_dir, 'components')
+ abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
+ abs_res_extensions_dir = os.path.join(abs_res_dir, 'extensions')
+
+ if bin_name:
+ self.info('copying %s to %s' % (os.path.join(dirs['abs_test_bin_dir'],
+ bin_name), os.path.join(abs_app_dir, bin_name)))
+ shutil.copy2(os.path.join(dirs['abs_test_bin_dir'], bin_name),
+ os.path.join(abs_app_dir, bin_name))
+
+ self.copytree(dirs['abs_test_bin_components_dir'],
+ abs_res_components_dir,
+ overwrite='overwrite_if_exists')
+ self.mkdir_p(abs_res_plugins_dir)
+ self.copytree(dirs['abs_test_bin_plugins_dir'],
+ abs_res_plugins_dir,
+ overwrite='overwrite_if_exists')
+ if os.path.isdir(dirs['abs_test_extensions_dir']):
+ self.mkdir_p(abs_res_extensions_dir)
+ self.copytree(dirs['abs_test_extensions_dir'],
+ abs_res_extensions_dir,
+ overwrite='overwrite_if_exists')
+
+ def _stage_xpcshell(self, suites):
+ self._stage_files(self.config['xpcshell_name'])
+
+ def _stage_cppunittest(self, suites):
+ abs_res_dir = self.query_abs_res_dir()
+ dirs = self.query_abs_dirs()
+ abs_cppunittest_dir = dirs['abs_cppunittest_dir']
+
+        # move manifest and js files to the resources dir, where tests expect them
+ files = glob.glob(os.path.join(abs_cppunittest_dir, '*.js'))
+ files.extend(glob.glob(os.path.join(abs_cppunittest_dir, '*.manifest')))
+ for f in files:
+ self.move(f, abs_res_dir)
+
+ def _stage_gtest(self, suites):
+ abs_res_dir = self.query_abs_res_dir()
+ abs_app_dir = self.query_abs_app_dir()
+ dirs = self.query_abs_dirs()
+ abs_gtest_dir = dirs['abs_gtest_dir']
+ dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')
+
+ files = glob.glob(os.path.join(dirs['abs_test_bin_plugins_dir'], 'gmp-*'))
+ files.append(os.path.join(abs_gtest_dir, 'dependentlibs.list.gtest'))
+ for f in files:
+ self.move(f, abs_res_dir)
+
+ self.copytree(os.path.join(abs_gtest_dir, 'gtest_bin'),
+ os.path.join(abs_app_dir))
+
+ def _stage_mozmill(self, suites):
+ self._stage_files()
+ dirs = self.query_abs_dirs()
+ modules = ['jsbridge', 'mozmill']
+ for module in modules:
+ self.install_module(module=os.path.join(dirs['abs_mozmill_dir'],
+ 'resources',
+ module))
+
+ # pull defined in VCSScript.
+ # preflight_run_tests defined in TestingMixin.
+
+ def run_tests(self):
+ for category in SUITE_CATEGORIES:
+ self._run_category_suites(category)
+
+ def get_timeout_for_category(self, suite_category):
+ if suite_category == 'cppunittest':
+ return 2500
+ return self.config["suite_definitions"][suite_category].get('run_timeout', 1000)
+
+ def _run_category_suites(self, suite_category):
+        """run the suite(s) for a specific category"""
+ dirs = self.query_abs_dirs()
+ suites = self._query_specified_suites(suite_category)
+ abs_app_dir = self.query_abs_app_dir()
+ abs_res_dir = self.query_abs_res_dir()
+
+ if suites:
+ self.info('#### Running %s suites' % suite_category)
+ for suite in suites:
+ abs_base_cmd = self._query_abs_base_cmd(suite_category, suite)
+ cmd = abs_base_cmd[:]
+ replace_dict = {
+ 'abs_app_dir': abs_app_dir,
+
+ # Mac specific, but points to abs_app_dir on other
+ # platforms.
+ 'abs_res_dir': abs_res_dir,
+ }
+ options_list = []
+ env = {}
+ if isinstance(suites[suite], dict):
+ options_list = suites[suite].get('options', [])
+ tests_list = suites[suite].get('tests', [])
+ env = copy.deepcopy(suites[suite].get('env', {}))
+ else:
+ options_list = suites[suite]
+ tests_list = []
+
+ flavor = self._query_try_flavor(suite_category, suite)
+ try_options, try_tests = self.try_args(flavor)
+
+ cmd.extend(self.query_options(options_list,
+ try_options,
+ str_format_values=replace_dict))
+ cmd.extend(self.query_tests_args(tests_list,
+ try_tests,
+ str_format_values=replace_dict))
+
+ suite_name = suite_category + '-' + suite
+ tbpl_status, log_level = None, None
+ error_list = BaseErrorList + HarnessErrorList
+ parser = self.get_test_output_parser(suite_category,
+ flavor=flavor,
+ config=self.config,
+ error_list=error_list,
+ log_obj=self.log_obj)
+
+ if suite_category == "reftest":
+ ref_formatter = imp.load_source(
+ "ReftestFormatter",
+ os.path.abspath(
+ os.path.join(dirs["abs_reftest_dir"], "output.py")))
+ parser.formatter = ref_formatter.ReftestFormatter()
+
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+ if self.query_nodejs():
+ env['MOZ_NODE_PATH'] = self.nodejs_path
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
+ self.mkdir_p(env['MOZ_UPLOAD_DIR'])
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ env = self.query_env(partial_env=env, log_level=INFO)
+ cmd_timeout = self.get_timeout_for_category(suite_category)
+ return_code = self.run_command(cmd, cwd=dirs['abs_work_dir'],
+ output_timeout=cmd_timeout,
+ output_parser=parser,
+ env=env)
+
+ # mochitest, reftest, and xpcshell suites do not return
+ # appropriate return codes. Therefore, we must parse the output
+ # to determine what the tbpl_status and worst_log_level must
+ # be. We do this by:
+ # 1) checking to see if our mozharness script ran into any
+ # errors itself with 'num_errors' <- OutputParser
+ # 2) if num_errors is 0 then we look in the subclassed 'parser'
+ # findings for harness/suite errors <- DesktopUnittestOutputParser
+ # 3) checking to see if the return code is in success_codes
+
+ success_codes = None
+ if self._is_windows() and suite_category != 'gtest':
+ # bug 1120644
+ success_codes = [0, 1]
+
+ tbpl_status, log_level = parser.evaluate_parser(return_code,
+ success_codes=success_codes)
+ parser.append_tinderboxprint_line(suite_name)
+
+ self.buildbot_status(tbpl_status, level=log_level)
+ self.log("The %s suite: %s ran with return status: %s" %
+ (suite_category, suite, tbpl_status), level=log_level)
+ else:
+ self.debug('There were no suites to run for %s' % suite_category)
+
+
+# main {{{1
+if __name__ == '__main__':
+ desktop_unittest = DesktopUnittest()
+ desktop_unittest.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_media_tests_buildbot.py b/testing/mozharness/scripts/firefox_media_tests_buildbot.py
new file mode 100644
index 000000000..17b830f0f
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_media_tests_buildbot.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""firefox_media_tests_buildbot.py
+
+Author: Maja Frydrychowicz
+"""
+import copy
+import glob
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.log import DEBUG, ERROR, INFO
+from mozharness.base.script import PostScriptAction
+from mozharness.mozilla.blob_upload import (
+ BlobUploadMixin,
+ blobupload_config_options
+)
+from mozharness.mozilla.buildbot import (
+ TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE
+)
+from mozharness.mozilla.testing.firefox_media_tests import (
+ FirefoxMediaTestsBase, TESTFAILED, SUCCESS
+)
+
+
+class FirefoxMediaTestsBuildbot(FirefoxMediaTestsBase, BlobUploadMixin):
+
+ def __init__(self):
+ config_options = copy.deepcopy(blobupload_config_options)
+ super(FirefoxMediaTestsBuildbot, self).__init__(
+ config_options=config_options,
+ all_actions=['clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ )
+
+ def run_media_tests(self):
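+        # Map the harness result onto TBPL status: green for SUCCESS,
+        # orange (warning) for TESTFAILED, red for anything else.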
+ status = super(FirefoxMediaTestsBuildbot, self).run_media_tests()
+ if status == SUCCESS:
+ tbpl_status = TBPL_SUCCESS
+ else:
+ tbpl_status = TBPL_FAILURE
+ if status == TESTFAILED:
+ tbpl_status = TBPL_WARNING
+ self.buildbot_status(tbpl_status)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(FirefoxMediaTestsBuildbot, self).query_abs_dirs()
+ dirs = {
+ 'abs_blob_upload_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'blobber_upload_dir')
+ }
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def _query_cmd(self):
+ cmd = super(FirefoxMediaTestsBuildbot, self)._query_cmd()
+ dirs = self.query_abs_dirs()
+ # configure logging
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ cmd += ['--gecko-log', os.path.join(blob_upload_dir, 'gecko.log')]
+ cmd += ['--log-html', os.path.join(blob_upload_dir, 'media_tests.html')]
+ cmd += ['--log-mach', os.path.join(blob_upload_dir, 'media_tests_mach.log')]
+ return cmd
+
+ @PostScriptAction('run-media-tests')
+ def _collect_uploads(self, action, success=None):
+ """ Copy extra (log) files to blob upload dir. """
+ dirs = self.query_abs_dirs()
+ log_dir = dirs.get('abs_log_dir')
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ if not log_dir or not blob_upload_dir:
+ return
+ self.mkdir_p(blob_upload_dir)
+ # Move firefox-media-test screenshots into log_dir
+ screenshots_dir = os.path.join(dirs['base_work_dir'],
+ 'screenshots')
+ log_screenshots_dir = os.path.join(log_dir, 'screenshots')
+ if os.access(log_screenshots_dir, os.F_OK):
+ self.rmtree(log_screenshots_dir)
+ if os.access(screenshots_dir, os.F_OK):
+ self.move(screenshots_dir, log_screenshots_dir)
+
+ # logs to upload: broadest level (info), error, screenshots
+ uploads = glob.glob(os.path.join(log_screenshots_dir, '*'))
+ log_files = self.log_obj.log_files
+ log_level = self.log_obj.log_level
+
+ def append_path(filename, dir=log_dir):
+ if filename:
+ uploads.append(os.path.join(dir, filename))
+
+ append_path(log_files.get(ERROR))
+ # never upload debug logs
+ if log_level == DEBUG:
+ append_path(log_files.get(INFO))
+ else:
+ append_path(log_files.get(log_level))
+ # in case of SimpleFileLogger
+ append_path(log_files.get('default'))
+ for f in uploads:
+ if os.access(f, os.F_OK):
+ dest = os.path.join(blob_upload_dir, os.path.basename(f))
+ self.copyfile(f, dest)
+
+
+if __name__ == '__main__':
+ media_test = FirefoxMediaTestsBuildbot()
+ media_test.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_media_tests_jenkins.py b/testing/mozharness/scripts/firefox_media_tests_jenkins.py
new file mode 100755
index 000000000..e35655257
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_media_tests_jenkins.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""firefox_media_tests_jenkins.py
+
+Author: Syd Polk
+"""
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.firefox_media_tests import (
+ FirefoxMediaTestsBase
+)
+
+
+class FirefoxMediaTestsJenkins(FirefoxMediaTestsBase):
+
+ def __init__(self):
+ super(FirefoxMediaTestsJenkins, self).__init__(
+ all_actions=['clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ )
+
+ def _query_cmd(self):
+ cmd = super(FirefoxMediaTestsJenkins, self)._query_cmd()
+
+ dirs = self.query_abs_dirs()
+
+ # configure logging
+ log_dir = dirs.get('abs_log_dir')
+ cmd += ['--gecko-log', os.path.join(log_dir, 'gecko.log')]
+ cmd += ['--log-html', os.path.join(log_dir, 'media_tests.html')]
+ cmd += ['--log-mach', os.path.join(log_dir, 'media_tests_mach.log')]
+
+ return cmd
+
+if __name__ == '__main__':
+ media_test = FirefoxMediaTestsJenkins()
+ media_test.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_media_tests_taskcluster.py b/testing/mozharness/scripts/firefox_media_tests_taskcluster.py
new file mode 100644
index 000000000..7a0121dca
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_media_tests_taskcluster.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""firefox_media_tests_taskcluster.py
+
+Adapted from firefox_media_tests_buildbot.py
+
+Author: Bryce Van Dyk
+"""
+import copy
+import glob
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.log import DEBUG, ERROR, INFO
+from mozharness.base.script import PostScriptAction
+from mozharness.mozilla.blob_upload import (
+ BlobUploadMixin,
+ blobupload_config_options
+)
+from mozharness.mozilla.testing.firefox_media_tests import (
+ FirefoxMediaTestsBase, TESTFAILED, SUCCESS
+)
+
+
+class FirefoxMediaTestsTaskcluster(FirefoxMediaTestsBase):
+
+ def __init__(self):
+ config_options = copy.deepcopy(blobupload_config_options)
+ super(FirefoxMediaTestsTaskcluster, self).__init__(
+ config_options=config_options,
+ all_actions=['clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ )
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(FirefoxMediaTestsTaskcluster, self).query_abs_dirs()
+ dirs = {
+ 'abs_blob_upload_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'blobber_upload_dir')
+ }
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def _query_cmd(self):
+ cmd = super(FirefoxMediaTestsTaskcluster, self)._query_cmd()
+ dirs = self.query_abs_dirs()
+ # configure logging
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ cmd += ['--gecko-log', os.path.join(blob_upload_dir, 'gecko.log')]
+ cmd += ['--log-html', os.path.join(blob_upload_dir, 'media_tests.html')]
+ cmd += ['--log-mach', os.path.join(blob_upload_dir, 'media_tests_mach.log')]
+ return cmd
+
+ @PostScriptAction('run-media-tests')
+ def _collect_uploads(self, action, success=None):
+ """ Copy extra (log) files to blob upload dir. """
+ dirs = self.query_abs_dirs()
+ log_dir = dirs.get('abs_log_dir')
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ if not log_dir or not blob_upload_dir:
+ return
+ self.mkdir_p(blob_upload_dir)
+ # Move firefox-media-test screenshots into log_dir
+ screenshots_dir = os.path.join(dirs['base_work_dir'],
+ 'screenshots')
+ log_screenshots_dir = os.path.join(log_dir, 'screenshots')
+ if os.access(log_screenshots_dir, os.F_OK):
+ self.rmtree(log_screenshots_dir)
+ if os.access(screenshots_dir, os.F_OK):
+ self.move(screenshots_dir, log_screenshots_dir)
+
+ # logs to upload: broadest level (info), error, screenshots
+ uploads = glob.glob(os.path.join(log_screenshots_dir, '*'))
+ log_files = self.log_obj.log_files
+ log_level = self.log_obj.log_level
+
+ def append_path(filename, dir=log_dir):
+ if filename:
+ uploads.append(os.path.join(dir, filename))
+
+ append_path(log_files.get(ERROR))
+ # never upload debug logs
+ if log_level == DEBUG:
+ append_path(log_files.get(INFO))
+ else:
+ append_path(log_files.get(log_level))
+ # in case of SimpleFileLogger
+ append_path(log_files.get('default'))
+ for f in uploads:
+ if os.access(f, os.F_OK):
+ dest = os.path.join(blob_upload_dir, os.path.basename(f))
+ self.copyfile(f, dest)
+
+
+if __name__ == '__main__':
+ media_test = FirefoxMediaTestsTaskcluster()
+ media_test.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_ui_tests/functional.py b/testing/mozharness/scripts/firefox_ui_tests/functional.py
new file mode 100755
index 000000000..58048ad33
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_ui_tests/functional.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.mozilla.testing.firefox_ui_tests import FirefoxUIFunctionalTests
+
+
+if __name__ == '__main__':
+ myScript = FirefoxUIFunctionalTests()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_ui_tests/update.py b/testing/mozharness/scripts/firefox_ui_tests/update.py
new file mode 100755
index 000000000..c8f5842b7
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_ui_tests/update.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.mozilla.testing.firefox_ui_tests import FirefoxUIUpdateTests
+
+
+if __name__ == '__main__':
+ myScript = FirefoxUIUpdateTests()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_ui_tests/update_release.py b/testing/mozharness/scripts/firefox_ui_tests/update_release.py
new file mode 100755
index 000000000..f1ec81646
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_ui_tests/update_release.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import copy
+import os
+import pprint
+import sys
+import urllib
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import PreScriptAction
+from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WARNING, EXIT_STATUS_DICT
+from mozharness.mozilla.testing.firefox_ui_tests import (
+ FirefoxUIUpdateTests,
+ firefox_ui_update_config_options
+)
+
+
+# Command line arguments for release update tests
+firefox_ui_update_release_config_options = [
+ [['--build-number'], {
+ 'dest': 'build_number',
+ 'help': 'Build number of release, eg: 2',
+        'help': 'Build number of the release, e.g. 2',
+ [['--limit-locales'], {
+ 'dest': 'limit_locales',
+ 'default': -1,
+ 'type': int,
+ 'help': 'Limit the number of locales to run.',
+ }],
+ [['--release-update-config'], {
+ 'dest': 'release_update_config',
+ 'help': 'Name of the release update verification config file to use.',
+ }],
+ [['--this-chunk'], {
+ 'dest': 'this_chunk',
+ 'default': 1,
+ 'help': 'What chunk of locales to process.',
+ }],
+ [['--tools-repo'], {
+ 'dest': 'tools_repo',
+ 'default': 'http://hg.mozilla.org/build/tools',
+ 'help': 'Which tools repo to check out',
+ }],
+ [['--tools-tag'], {
+ 'dest': 'tools_tag',
+ 'help': 'Which revision/tag to use for the tools repository.',
+ }],
+ [['--total-chunks'], {
+ 'dest': 'total_chunks',
+ 'default': 1,
+        'help': 'Total chunks to divide the locales into.',
+ }],
+] + copy.deepcopy(firefox_ui_update_config_options)
+
+
+class ReleaseFirefoxUIUpdateTests(FirefoxUIUpdateTests):
+
+ def __init__(self):
+ all_actions = [
+ 'clobber',
+ 'checkout',
+ 'create-virtualenv',
+ 'query_minidump_stackwalk',
+ 'read-release-update-config',
+ 'run-tests',
+ ]
+
+ super(ReleaseFirefoxUIUpdateTests, self).__init__(
+ all_actions=all_actions,
+ default_actions=all_actions,
+ config_options=firefox_ui_update_release_config_options,
+ append_env_variables_from_configs=True,
+ )
+
+ self.tools_repo = self.config.get('tools_repo')
+ self.tools_tag = self.config.get('tools_tag')
+
+ assert self.tools_repo and self.tools_tag, \
+            'Without "--tools-repo" and "--tools-tag" we can\'t clone releng\'s tools repository.'
+
+ self.limit_locales = int(self.config.get('limit_locales'))
+
+ # This will be a list containing one item per release based on configs
+ # from tools/release/updates/*cfg
+ self.releases = None
+
+ def checkout(self):
+ """
+        Check out the tools repository and update it to the requested tag.
+ """
+ dirs = self.query_abs_dirs()
+
+ super(ReleaseFirefoxUIUpdateTests, self).checkout()
+
+ self.vcs_checkout(
+ repo=self.tools_repo,
+ dest=dirs['abs_tools_dir'],
+ branch=self.tools_tag,
+ vcs='hg'
+ )
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+
+ abs_dirs = super(ReleaseFirefoxUIUpdateTests, self).query_abs_dirs()
+ dirs = {
+ 'abs_tools_dir': os.path.join(abs_dirs['abs_work_dir'], 'tools'),
+ }
+
+ for key in dirs:
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def read_release_update_config(self):
+ '''
+ Builds a testing matrix based on an update verification configuration
+ file under the tools repository (release/updates/*.cfg).
+
+        Each release info line of the update verification files looks similar to the following.
+
+        NOTE: This shows each pair of information on a new line but in reality
+        there is one white space separating them. We only show the values we care about.
+
+ release="38.0"
+ platform="Linux_x86_64-gcc3"
+ build_id="20150429135941"
+ locales="ach af ... zh-TW"
+ channel="beta-localtest"
+ from="/firefox/releases/38.0b9/linux-x86_64/%locale%/firefox-38.0b9.tar.bz2"
+ ftp_server_from="http://archive.mozilla.org/pub"
+
+ We will store this information in self.releases as a list of releases.
+
+        NOTE: We speak of full and quick releases. Full release info normally
+        contains a subset of all locales (except for the most recent releases).
+        A quick release has all locales, but misses the 'from' and
+        'ftp_server_from' fields. The two kinds of entries complement each other.
+ '''
+ dirs = self.query_abs_dirs()
+ assert os.path.exists(dirs['abs_tools_dir']), \
+ 'Without the tools/ checkout we can\'t use releng\'s config parser.'
+
+ if self.config.get('release_update_config'):
+ # The config file is part of the tools repository. Make sure that if specified
+ # we force a revision of that repository to be set.
+ if self.tools_tag is None:
+ self.fatal('Make sure to specify the --tools-tag')
+
+ self.release_update_config = self.config['release_update_config']
+
+ # Import the config parser
+ sys.path.insert(1, os.path.join(dirs['abs_tools_dir'], 'lib', 'python'))
+ from release.updates.verify import UpdateVerifyConfig
+
+ uvc = UpdateVerifyConfig()
+ config_file = os.path.join(dirs['abs_tools_dir'], 'release', 'updates',
+ self.config['release_update_config'])
+ uvc.read(config_file)
+ if not hasattr(self, 'update_channel'):
+ self.update_channel = uvc.channel
+
+ # Filter out any releases that are less than Gecko 38
+ uvc.releases = [r for r in uvc.releases
+ if int(r['release'].split('.')[0]) >= 38]
+
+ temp_releases = []
+ for rel_info in uvc.releases:
+ # This is the full release info
+ if 'from' in rel_info and rel_info['from'] is not None:
+                # Find the associated quick release, which contains the
+                # remaining locales for all releases except the most recent
+                # one (which already contains all locales)
+ quick_release = uvc.getRelease(build_id=rel_info['build_id'], from_path=None)
+ if quick_release != {}:
+ rel_info['locales'] = sorted(rel_info['locales'] + quick_release['locales'])
+ temp_releases.append(rel_info)
+
+ uvc.releases = temp_releases
+ chunked_config = uvc.getChunk(
+ chunks=int(self.config['total_chunks']),
+ thisChunk=int(self.config['this_chunk'])
+ )
+
+ self.releases = chunked_config.releases
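+        # Each entry of self.releases now looks like (values illustrative):
+        #   {'release': '38.0', 'build_id': '20150429135941',
+        #    'locales': ['ach', 'af', ...], 'channel': 'beta-localtest',
+        #    'from': '/firefox/releases/...', 'ftp_server_from': '...'}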
+
+ @PreScriptAction('run-tests')
+ def _pre_run_tests(self, action):
+ assert ('release_update_config' in self.config or
+ self.installer_url or self.installer_path), \
+            'Either specify --release-update-config, --installer-url or --installer-path.'
+
+ def run_tests(self):
+ dirs = self.query_abs_dirs()
+
+ # We don't want multiple outputs of the same environment information. To prevent
+ # that, we can't make it an argument of run_command and have to print it on our own.
+ self.info('Using env: {}'.format(pprint.pformat(self.query_env())))
+
+ results = {}
+
+ locales_counter = 0
+ for rel_info in sorted(self.releases, key=lambda release: release['build_id']):
+ build_id = rel_info['build_id']
+ results[build_id] = {}
+
+ self.info('About to run {buildid} {path} - {num_locales} locales'.format(
+ buildid=build_id,
+ path=rel_info['from'],
+ num_locales=len(rel_info['locales'])
+ ))
+
+ # Each locale gets a fresh port to avoid address in use errors in case of
+ # tests that time out unexpectedly.
+ marionette_port = 2827
+ for locale in rel_info['locales']:
+ locales_counter += 1
+ self.info('Running {buildid} {locale}'.format(buildid=build_id,
+ locale=locale))
+
+ if self.limit_locales > -1 and locales_counter > self.limit_locales:
+ self.info('We have reached the limit of locales we were intending to run')
+ break
+
+ if self.config['dry_run']:
+ continue
+
+ # Determine from where to download the file
+ installer_url = '{server}/{fragment}'.format(
+ server=rel_info['ftp_server_from'],
+ fragment=urllib.quote(rel_info['from'].replace('%locale%', locale))
+ )
+ installer_path = self.download_file(
+ url=installer_url,
+ parent_dir=dirs['abs_work_dir']
+ )
+
+ binary_path = self.install_app(app=self.config.get('application'),
+ installer_path=installer_path)
+
+ marionette_port += 1
+
+ retcode = self.run_test(
+ binary_path=binary_path,
+ env=self.query_env(avoid_host_env=True),
+ marionette_port=marionette_port,
+ )
+
+ self.uninstall_app()
+
+ # Remove installer which is not needed anymore
+ self.info('Removing {}'.format(installer_path))
+ os.remove(installer_path)
+
+ if retcode:
+ self.warning('FAIL: {} has failed.'.format(sys.argv[0]))
+
+ base_cmd = 'python {command} --firefox-ui-branch {branch} ' \
+ '--release-update-config {config} --tools-tag {tag}'.format(
+ command=sys.argv[0],
+ branch=self.firefox_ui_branch,
+ config=self.release_update_config,
+ tag=self.tools_tag
+ )
+
+ for config in self.config['config_files']:
+ base_cmd += ' --cfg {}'.format(config)
+
+ if self.symbols_url:
+ base_cmd += ' --symbols-path {}'.format(self.symbols_url)
+
+ base_cmd += ' --installer-url {}'.format(installer_url)
+
+ self.info('You can run the *specific* locale on the same machine with:')
+ self.info(base_cmd)
+
+ self.info('You can run the *specific* locale on *your* machine with:')
+ self.info('{} --cfg developer_config.py'.format(base_cmd))
+
+ results[build_id][locale] = retcode
+
+ self.info('Completed {buildid} {locale} with return code: {retcode}'.format(
+ buildid=build_id,
+ locale=locale,
+ retcode=retcode))
+
+ if self.limit_locales > -1 and locales_counter > self.limit_locales:
+ break
+
+ # Determine which locales have failed and set scripts exit code
+ exit_status = TBPL_SUCCESS
+ for build_id in sorted(results.keys()):
+ failed_locales = []
+ for locale in sorted(results[build_id].keys()):
+ if results[build_id][locale] != 0:
+ failed_locales.append(locale)
+
+ if failed_locales:
+ if exit_status == TBPL_SUCCESS:
+ self.info('\nSUMMARY - Failed locales for {}:'.format(self.cli_script))
+ self.info('====================================================')
+ exit_status = TBPL_WARNING
+
+ self.info(build_id)
+ self.info(' {}'.format(', '.join(failed_locales)))
+
+ self.return_code = EXIT_STATUS_DICT[exit_status]
+
+
+if __name__ == '__main__':
+ myScript = ReleaseFirefoxUIUpdateTests()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/fx_desktop_build.py b/testing/mozharness/scripts/fx_desktop_build.py
new file mode 100755
index 000000000..40f20442c
--- /dev/null
+++ b/testing/mozharness/scripts/fx_desktop_build.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""fx_desktop_build.py.
+
+script harness to build nightly Firefox within Mozilla's build environment
+and on developer machines alike
+
+author: Jordan Lund
+
+"""
+
+import copy
+import pprint
+import sys
+import os
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+import mozharness.base.script as script
+from mozharness.mozilla.building.buildbase import BUILD_BASE_CONFIG_OPTIONS, \
+ BuildingConfig, BuildOptionParser, BuildScript
+from mozharness.base.config import parse_config_file
+from mozharness.mozilla.testing.try_tools import TryToolsMixin, try_config_options
+
+
+class FxDesktopBuild(BuildScript, TryToolsMixin, object):
+ def __init__(self):
+ buildscript_kwargs = {
+ 'config_options': BUILD_BASE_CONFIG_OPTIONS + copy.deepcopy(try_config_options),
+ 'all_actions': [
+ 'get-secrets',
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files', # upload from BB to TC
+ 'sendchange',
+ 'check-test',
+ 'valgrind-test',
+ 'package-source',
+ 'generate-source-signing-manifest',
+ 'multi-l10n',
+ 'generate-build-stats',
+ 'update',
+ ],
+ 'require_config_file': True,
+ # Default configuration
+ 'config': {
+ 'is_automation': True,
+ "pgo_build": False,
+ "debug_build": False,
+ "pgo_platforms": ['linux', 'linux64', 'win32', 'win64'],
+ # nightly stuff
+ "nightly_build": False,
+ 'balrog_credentials_file': 'oauth.txt',
+ 'taskcluster_credentials_file': 'oauth.txt',
+ 'periodic_clobber': 168,
+ # hg tool stuff
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ # Seed all clones with mozilla-unified. This ensures subsequent
+ # jobs have a minimal `hg pull`.
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ "repo_base": "https://hg.mozilla.org",
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ "graph_selector": "/server/collect.cgi",
+ # only used for make uploadsymbols
+ 'old_packages': [
+ "%(objdir)s/dist/firefox-*",
+ "%(objdir)s/dist/fennec*",
+ "%(objdir)s/dist/seamonkey*",
+ "%(objdir)s/dist/thunderbird*",
+ "%(objdir)s/dist/install/sea/*.exe"
+ ],
+ 'stage_product': 'firefox',
+ 'platform_supports_post_upload_to_latest': True,
+ 'build_resources_path': '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
+ 'nightly_promotion_branches': ['mozilla-central', 'mozilla-aurora'],
+
+ # try will overwrite these
+ 'clone_with_purge': False,
+ 'clone_by_revision': False,
+ 'tinderbox_build_dir': None,
+ 'to_tinderbox_dated': True,
+ 'release_to_try_builds': False,
+ 'include_post_upload_builddir': False,
+ 'use_clobberer': True,
+
+ 'stage_username': 'ffxbld',
+ 'stage_ssh_key': 'ffxbld_rsa',
+ 'virtualenv_modules': [
+ 'requests==2.8.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.26',
+ ],
+ 'virtualenv_path': 'venv',
+ },
+ 'ConfigClass': BuildingConfig,
+ }
+ super(FxDesktopBuild, self).__init__(**buildscript_kwargs)
+
+ def _pre_config_lock(self, rw_config):
+ """grab buildbot props if we are running this in automation"""
+ super(FxDesktopBuild, self)._pre_config_lock(rw_config)
+ c = self.config
+ if c['is_automation']:
+ # parse buildbot config and add it to self.config
+ self.info("We are running this in buildbot, grab the build props")
+ self.read_buildbot_config()
+ if c.get('stage_platform'):
+ platform_for_log_url = c['stage_platform']
+ if c.get('pgo_build'):
+ platform_for_log_url += '-pgo'
+ # postrun.py uses stage_platform buildbot prop as part of the log url
+ self.set_buildbot_property('stage_platform',
+ platform_for_log_url,
+ write_to_file=True)
+ else:
+ self.fatal("'stage_platform' not determined and is required in your config")
+
+ if self.try_message_has_flag('artifact'):
+ self.info('Artifact build requested in try syntax.')
+ variant = 'artifact'
+ if c.get('build_variant') in ['debug', 'cross-debug']:
+ variant = 'debug-artifact'
+ self._update_build_variant(rw_config, variant)
+
+ # helpers
+ def _update_build_variant(self, rw_config, variant='artifact'):
+ """ Intended for use in _pre_config_lock """
+ c = self.config
+ variant_cfg_path, _ = BuildOptionParser.find_variant_cfg_path(
+ '--custom-build-variant-cfg',
+ variant,
+ rw_config.config_parser
+ )
+ if not variant_cfg_path:
+ self.fatal('Could not find appropriate config file for variant %s' % variant)
+ # Update other parts of config to keep dump-config accurate
+ # Only dump-config is affected because most config info is set during
+ # initial parsing
+ variant_cfg_dict = parse_config_file(variant_cfg_path)
+ rw_config.all_cfg_files_and_dicts.append((variant_cfg_path, variant_cfg_dict))
+ c.update({
+ 'build_variant': variant,
+ 'config_files': c['config_files'] + [variant_cfg_path]
+ })
+
+ self.info("Updating self.config with the following from {}:".format(variant_cfg_path))
+ self.info(pprint.pformat(variant_cfg_dict))
+ c.update(variant_cfg_dict)
+ c['forced_artifact_build'] = True
+ # Bug 1231320 adds MOZHARNESS_ACTIONS in TaskCluster tasks to override default_actions
+ # We don't want that when forcing an artifact build.
+ if rw_config.volatile_config['actions']:
+ self.info("Updating volatile_config to include default_actions "
+ "from {}.".format(variant_cfg_path))
+ # add default actions in correct order
+ combined_actions = []
+ for a in rw_config.all_actions:
+ if a in c['default_actions'] or a in rw_config.volatile_config['actions']:
+ combined_actions.append(a)
+ rw_config.volatile_config['actions'] = combined_actions
+ self.info("Actions in volatile_config are now: {}".format(
+ rw_config.volatile_config['actions'])
+ )
+ # replace rw_config as well to set actions as in BaseScript
+ rw_config.set_config(c, overwrite=True)
+ rw_config.update_actions()
+ self.actions = tuple(rw_config.actions)
+ self.all_actions = tuple(rw_config.all_actions)
+
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ c = self.config
+ abs_dirs = super(FxDesktopBuild, self).query_abs_dirs()
+ if not c.get('app_ini_path'):
+ self.fatal('"app_ini_path" is needed in your config for this '
+ 'script.')
+
+ dirs = {
+            # BuildFactories in factory.py refer to a 'build' dir on the slave.
+            # This contains all the source code/objdir to compile. However,
+            # there is already a build dir in mozharness for every mh run. The
+            # 'build' that factory refers to is named 'src' here so there is a
+            # separation in mh. For example, rather than having
+            # '{mozharness_repo}/build/build/', we have
+            # '{mozharness_repo}/build/src/'
+ 'abs_src_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'src'),
+ 'abs_obj_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'src',
+ self._query_objdir()),
+ 'abs_tools_dir': os.path.join(abs_dirs['abs_work_dir'], 'tools'),
+ 'abs_app_ini_path': c['app_ini_path'] % {
+ 'obj_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'src',
+ self._query_objdir())
+ },
+ }
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ # Actions {{{2
+ # read_buildbot_config in BuildingMixin
+ # clobber in BuildingMixin -> PurgeMixin
+ # if Linux config:
+    #     reset_mock in BuildingMixin -> MockMixin
+    #     setup_mock in BuildingMixin (overrides MockMixin.mock_setup)
+
+ def set_extra_try_arguments(self, action, success=None):
+ """ Override unneeded method from TryToolsMixin """
+ pass
+
+ @script.PreScriptRun
+ def suppress_windows_modal_dialogs(self, *args, **kwargs):
+ if self._is_windows():
+ # Suppress Windows modal dialogs to avoid hangs
+ import ctypes
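+            # 0x8001 = SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX:
+            # suppress critical-error and open-file failure dialogs.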
+ ctypes.windll.kernel32.SetErrorMode(0x8001)
+
+if __name__ == '__main__':
+ fx_desktop_build = FxDesktopBuild()
+ fx_desktop_build.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_build_integration.py b/testing/mozharness/scripts/gaia_build_integration.py
new file mode 100755
index 000000000..32d188ffd
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_build_integration.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaBuildIntegrationTest(GaiaTest):
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the integration test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.node_setup()
+
+ output_parser = TestSummaryOutputParserHelper(
+ config=self.config, log_obj=self.log_obj, error_list=self.error_list)
+
+ cmd = [
+ 'make',
+ 'build-test-integration',
+ 'REPORTER=mocha-tbpl-reporter',
+ 'NODE_MODULES_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1',
+ 'TRY_ENV=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'],
+ output_parser=output_parser,
+ output_timeout=600)
+
+ output_parser.print_summary('gaia-build-integration-tests')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_build_integration_test = GaiaBuildIntegrationTest()
+ gaia_build_integration_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_build_unit.py b/testing/mozharness/scripts/gaia_build_unit.py
new file mode 100755
index 000000000..c16ce99fa
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_build_unit.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaBuildUnitTest(GaiaTest):
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the gaia build unit test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.node_setup()
+
+ output_parser = TestSummaryOutputParserHelper(
+ config=self.config, log_obj=self.log_obj, error_list=self.error_list)
+
+ cmd = [
+ 'make',
+ 'build-test-unit',
+ 'REPORTER=mocha-tbpl-reporter',
+ 'NODE_MODULES_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1',
+ 'TRY_ENV=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'],
+ output_parser=output_parser,
+ output_timeout=330)
+
+ output_parser.print_summary('gaia-build-unit-tests')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_build_unit_test = GaiaBuildUnitTest()
+ gaia_build_unit_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_integration.py b/testing/mozharness/scripts/gaia_integration.py
new file mode 100644
index 000000000..3edb8b964
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_integration.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaIntegrationTest(GaiaTest):
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the integration test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.node_setup()
+
+ output_parser = TestSummaryOutputParserHelper(
+ config=self.config, log_obj=self.log_obj, error_list=self.error_list)
+
+ # Bug 1046694 - add environment variables which govern test chunking
+ env = {}
+ if self.config.get('this_chunk') and self.config.get('total_chunks'):
+ env["PART"] = self.config.get('this_chunk')
+ env["NBPARTS"] = self.config.get('total_chunks')
+ env = self.query_env(partial_env=env)
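+        # e.g. running with --this-chunk 2 --total-chunks 4 exports PART=2
+        # and NBPARTS=4 for the gaia test harness (illustrative values).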
+
+ # Bug 1137884 - marionette-js-runner needs to know about virtualenv
+ gaia_runner_service = (
+ dirs['abs_gaia_dir'] +
+ '/node_modules/marionette-js-runner/host/python/runner-service')
+ # Check whether python package is around since there exist versions
+ # of gaia that depend on versions of marionette-js-runner without
+ # the python stuff.
+ if os.path.exists(gaia_runner_service):
+ self.install_module('gaia-runner-service', gaia_runner_service)
+ env['VIRTUALENV_PATH'] = self.query_virtualenv_path()
+ env['HOST_LOG'] = os.path.join(dirs['abs_log_dir'], 'gecko_output.log')
+
+ cmd = [
+ 'make',
+ 'test-integration',
+ 'REPORTER=mocha-tbpl-reporter',
+ 'TEST_MANIFEST=./shared/test/integration/tbpl-manifest.json',
+ 'NODE_MODULE_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'], env=env,
+ output_parser=output_parser,
+ output_timeout=330)
+
+ output_parser.print_summary('gaia-integration-tests')
+ self.publish(code, passed=output_parser.passed, failed=output_parser.failed)
+
+if __name__ == '__main__':
+ gaia_integration_test = GaiaIntegrationTest()
+ gaia_integration_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_linter.py b/testing/mozharness/scripts/gaia_linter.py
new file mode 100755
index 000000000..e4441b92b
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_linter.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import re
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.log import OutputParser, ERROR
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+
+
+class GaiaLinterOutputParser(OutputParser):
+
+ JSHINT_START = "Running jshint..."
+ JSHINT_DONE = "xfailed)"
+    JSHINT_ERROR = re.compile(r'(.+): (.*?) \(ERROR\)')
+
+    LAST_FILE = re.compile(r'----- FILE : (.*?) -----')
+
+    GJSLINT_START = "Running gjslint..."
+    GJSLINT_ERROR = re.compile(r'Line (\d+), E:(\d+):')
+
+    GENERAL_ERRORS = (re.compile(r'make(.*?)\*\*\*(.*?)Error'),)
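+    # Illustrative lines the patterns above are meant to match (not
+    # verbatim linter output):
+    #   jshint:  apps/foo/bar.js: Missing semicolon. (ERROR)
+    #   gjslint: Line 12, E:0110: Line too long
+    #   make:    make[1]: *** [lint] Error 1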
+
+ def __init__(self, **kwargs):
+ self.base_dir = kwargs.pop('base_dir')
+ super(GaiaLinterOutputParser, self).__init__(**kwargs)
+ self.in_jshint = False
+ self.in_gjslint = False
+ self.last_file = 'unknown'
+
+ def log_error(self, message, filename=None):
+ if not filename:
+            self.log('TEST-UNEXPECTED-FAIL | make lint | %s' % message,
+                     level=ERROR)
+ else:
+ path = filename
+ if self.base_dir in path:
+ path = os.path.relpath(filename, self.base_dir)
+ self.log('TEST-UNEXPECTED-FAIL | %s | %s' % (path, message),
+ level=ERROR)
+ self.num_errors += 1
+ self.worst_log_level = self.worst_level(ERROR,
+ self.worst_log_level)
+
+ def parse_single_line(self, line):
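+        # The lint output interleaves jshint and gjslint sections; track
+        # which section we are in so errors are attributed to the right tool.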
+ if not self.in_jshint:
+ if self.JSHINT_START in line:
+ self.in_jshint = True
+ self.in_gjslint = False
+ else:
+ if self.JSHINT_DONE in line:
+ self.in_jshint = False
+
+ if not self.in_gjslint:
+ if self.GJSLINT_START in line:
+ self.in_gjslint = True
+
+ if self.in_jshint:
+ m = self.JSHINT_ERROR.search(line)
+ if m:
+ self.log_error(m.groups()[1], m.groups()[0])
+
+ if self.in_gjslint:
+ m = self.LAST_FILE.search(line)
+ if m:
+ self.last_file = m.groups()[0]
+
+ m = self.GJSLINT_ERROR.search(line)
+ if m:
+ self.log_error(line, self.last_file)
+
+ for an_error in self.GENERAL_ERRORS:
+ if an_error.search(line):
+ self.log_error(line)
+
+ if self.log_output:
+ self.info(' %s' % line)
+
+ def evaluate_parser(self):
+ # generate the TinderboxPrint line for TBPL
+ if self.num_errors:
+ self.tsummary = '<em class="testfail">%d errors</em>' % self.num_errors
+ else:
+ self.tsummary = "0 errors"
+
+ def print_summary(self, suite_name):
+ self.evaluate_parser()
+ self.info("TinderboxPrint: %s: %s\n" % (suite_name, self.tsummary))
+
+
+class GaiaLintTest(GaiaTest):
+
+ virtualenv_modules = ['closure_linter==2.3.13',
+ 'python-gflags',
+ ]
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the integration test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ # Copy the b2g desktop we built to the gaia directory so that it
+ # gets used by the marionette-js-runner.
+ self.copytree(
+ os.path.join(os.path.dirname(self.binary_path)),
+ os.path.join(dirs['abs_gaia_dir'], 'b2g'),
+ overwrite='clobber'
+ )
+
+ cmd = [
+ 'make',
+ 'lint',
+ 'NODE_MODULES_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ self.make_node_modules()
+
+ output_parser = GaiaLinterOutputParser(
+ base_dir=dirs['abs_gaia_dir'],
+ config=self.config,
+ log_obj=self.log_obj)
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'],
+ output_parser=output_parser,
+ output_timeout=600)
+
+ output_parser.print_summary('gaia-lint')
+ self.publish(code)
+
+if __name__ == '__main__':
+    gaia_lint_test = GaiaLintTest()
+    gaia_lint_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_unit.py b/testing/mozharness/scripts/gaia_unit.py
new file mode 100755
index 000000000..660643b74
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_unit.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+import glob
+import subprocess
+import json
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaUnitTest(GaiaTest):
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def pull(self, **kwargs):
+ GaiaTest.pull(self, **kwargs)
+
+ def run_tests(self):
+ """
+ Run the unit test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.make_node_modules()
+
+ # make the gaia profile
+ self.make_gaia(dirs['abs_gaia_dir'],
+ self.config.get('xre_path'),
+ xre_url=self.config.get('xre_url'),
+ debug=True)
+
+ # build the testrunner command arguments
+ python = self.query_python_path('python')
+ cmd = [python, '-u', os.path.join(dirs['abs_runner_dir'],
+ 'gaia_unit_test',
+ 'main.py')]
+ executable = 'firefox'
+ if 'b2g' in self.binary_path:
+ executable = 'b2g-bin'
+
+ profile = os.path.join(dirs['abs_gaia_dir'], 'profile-debug')
+ binary = os.path.join(os.path.dirname(self.binary_path), executable)
+ cmd.extend(self._build_arg('--binary', binary))
+ cmd.extend(self._build_arg('--profile', profile))
+ cmd.extend(self._build_arg('--symbols-path', self.symbols_path))
+ cmd.extend(self._build_arg('--browser-arg', self.config.get('browser_arg')))
+
+ # Add support for chunking
+ if self.config.get('total_chunks') and self.config.get('this_chunk'):
+ chunker = [ os.path.join(dirs['abs_gaia_dir'], 'bin', 'chunk'),
+ self.config.get('total_chunks'), self.config.get('this_chunk') ]
+
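+            # The gaia 'bin/chunk' helper is assumed to print the subset of
+            # test paths belonging to this chunk, e.g. |chunk 2 1 a b c d|
+            # might print "a b" (illustrative; see check_output below).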
+            disabled_tests = []
+            disabled_manifest = os.path.join(dirs['abs_runner_dir'],
+                                             'gaia_unit_test',
+                                             'disabled.json')
+            with open(disabled_manifest, 'r') as m:
+                try:
+                    disabled_tests = json.loads(m.read())
+                except ValueError:
+                    print "Error while decoding disabled.json; please make sure this file has valid JSON syntax."
+                    sys.exit(1)
+
+            # Construct a list of all tests
+            unit_tests = []
+            for path in ('apps', 'tv_apps'):
+                test_root = os.path.join(dirs['abs_gaia_dir'], path)
+                full_paths = glob.glob(os.path.join(test_root, '*/test/unit/*_test.js'))
+                unit_tests += map(lambda x: os.path.relpath(x, test_root), full_paths)
+
+            # Remove the tests that are disabled
+            active_unit_tests = filter(lambda x: x not in disabled_tests, unit_tests)
+
+            # Chunk the list as requested
+            tests_to_run = subprocess.check_output(chunker + active_unit_tests).strip().split(' ')
+
+            cmd.extend(tests_to_run)
+
+ output_parser = TestSummaryOutputParserHelper(config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list)
+
+ upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.isdir(upload_dir):
+ self.mkdir_p(upload_dir)
+
+ env = self.query_env()
+ env['MOZ_UPLOAD_DIR'] = upload_dir
+ # I don't like this output_timeout hardcode, but bug 920153
+ code = self.run_command(cmd, env=env,
+ output_parser=output_parser,
+ output_timeout=1760)
+
+ output_parser.print_summary('gaia-unit-tests')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_unit_test = GaiaUnitTest()
+ gaia_unit_test.run_and_exit()
diff --git a/testing/mozharness/scripts/marionette.py b/testing/mozharness/scripts/marionette.py
new file mode 100755
index 000000000..b7f9c2765
--- /dev/null
+++ b/testing/mozharness/scripts/marionette.py
@@ -0,0 +1,358 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import copy
+import os
+import re
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import TarErrorList
+from mozharness.base.log import INFO, ERROR, WARNING
+from mozharness.base.script import PreScriptAction
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.testing.errors import LogcatErrorList
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+
+# TODO: we could remove the emulator-specific code after the B2G ICS emulator
+# buildbot builds are turned off, Bug 1209180.
+
+
+class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin):
+ config_options = [[
+ ["--application"],
+ {"action": "store",
+ "dest": "application",
+ "default": None,
+ "help": "application name of binary"
+ }
+ ], [
+ ["--app-arg"],
+ {"action": "store",
+ "dest": "app_arg",
+ "default": None,
+ "help": "Optional command-line argument to pass to the browser"
+ }
+ ], [
+ ["--marionette-address"],
+ {"action": "store",
+ "dest": "marionette_address",
+ "default": None,
+ "help": "The host:port of the Marionette server running inside Gecko. Unused for emulator testing",
+ }
+ ], [
+ ["--emulator"],
+ {"action": "store",
+ "type": "choice",
+ "choices": ['arm', 'x86'],
+ "dest": "emulator",
+ "default": None,
+ "help": "Use an emulator for testing",
+ }
+ ], [
+ ["--test-manifest"],
+ {"action": "store",
+ "dest": "test_manifest",
+ "default": "unit-tests.ini",
+ "help": "Path to test manifest to run relative to the Marionette "
+ "tests directory",
+ }
+ ], [
+ ["--total-chunks"],
+ {"action": "store",
+ "dest": "total_chunks",
+ "help": "Number of total chunks",
+ }
+ ], [
+ ["--this-chunk"],
+ {"action": "store",
+ "dest": "this_chunk",
+ "help": "Number of this chunk",
+ }
+ ], [
+ ["--e10s"],
+ {"action": "store_true",
+ "dest": "e10s",
+ "default": False,
+ "help": "Run tests with multiple processes. (Desktop builds only)",
+ }
+ ], [
+ ["--allow-software-gl-layers"],
+ {"action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."
+ }
+ ]] + copy.deepcopy(testing_config_options) \
+ + copy.deepcopy(blobupload_config_options)
+
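+    # Output lines matching these patterns raise the log level of the run,
+    # turning crashes and lost Marionette connections into visible failures.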
+ error_list = [
+ {'substr': 'FAILED (errors=', 'level': WARNING},
+ {'substr': r'''Could not successfully complete transport of message to Gecko, socket closed''', 'level': ERROR},
+ {'substr': r'''Connection to Marionette server is lost. Check gecko''', 'level': ERROR},
+ {'substr': 'Timeout waiting for marionette on port', 'level': ERROR},
+ {'regex': re.compile(r'''(TEST-UNEXPECTED|PROCESS-CRASH)'''), 'level': ERROR},
+ {'regex': re.compile(r'''(\b((?!Marionette|TestMarionette|NoSuchElement|XPathLookup|NoSuchWindow|StaleElement|ScriptTimeout|ElementNotVisible|NoSuchFrame|InvalidResponse|Javascript|Timeout|InvalidElementState|NoAlertPresent|InvalidCookieDomain|UnableToSetCookie|InvalidSelector|MoveTargetOutOfBounds)\w*)Exception)'''), 'level': ERROR},
+ ]
+
+ repos = []
+
+ def __init__(self, require_config_file=False):
+ super(MarionetteTest, self).__init__(
+ config_options=self.config_options,
+ all_actions=['clobber',
+ 'read-buildbot-config',
+ 'pull',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests'],
+ default_actions=['clobber',
+ 'pull',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests'],
+ require_config_file=require_config_file,
+ config={'require_test_zip': True})
+
+ # these are necessary since self.config is read only
+ c = self.config
+ self.installer_url = c.get('installer_url')
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+
+ if c.get('structured_output'):
+ self.parser_class = StructuredOutputParser
+ else:
+ self.parser_class = TestSummaryOutputParserHelper
+
+ def _pre_config_lock(self, rw_config):
+ super(MarionetteTest, self)._pre_config_lock(rw_config)
+ if not self.config.get('emulator') and not self.config.get('marionette_address'):
+ self.fatal("You need to specify a --marionette-address for non-emulator tests! (Try --marionette-address localhost:2828 )")
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(MarionetteTest, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_test_install_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'tests')
+ dirs['abs_marionette_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'harness', 'marionette_harness')
+ dirs['abs_marionette_tests_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'tests', 'testing',
+ 'marionette', 'harness', 'marionette_harness', 'tests')
+ dirs['abs_gecko_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'gecko')
+ dirs['abs_emulator_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'emulator')
+
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ @PreScriptAction('create-virtualenv')
+ def _configure_marionette_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'marionette_requirements.txt')
+ if os.access(requirements, os.F_OK):
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+ else:
+ # XXX Bug 879765: Dependent modules need to be listed before parent
+ # modules, otherwise they will get installed from the pypi server.
+ # XXX Bug 908356: This block can be removed as soon as the
+ # in-tree requirements files propagate to all active trees.
+ mozbase_dir = os.path.join('tests', 'mozbase')
+ self.register_virtualenv_module(
+ 'manifestparser', os.path.join(mozbase_dir, 'manifestdestiny'))
+ for m in ('mozfile', 'mozlog', 'mozinfo', 'moznetwork', 'mozhttpd',
+ 'mozcrash', 'mozinstall', 'mozdevice', 'mozprofile',
+ 'mozprocess', 'mozrunner'):
+ self.register_virtualenv_module(
+ m, os.path.join(mozbase_dir, m))
+
+ self.register_virtualenv_module(
+ 'marionette', os.path.join('tests', 'marionette'))
+
+ def _get_options_group(self, is_emulator):
+ """
+ Determine which in tree options group to use and return the
+ appropriate key.
+ """
+ platform = 'emulator' if is_emulator else 'desktop'
+        # Currently, running marionette on an emulator means running the
+        # webapi tests. This method will need to change if that changes.
+ testsuite = 'webapi' if is_emulator else 'marionette'
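+        # e.g. is_emulator=True -> 'webapi_emulator',
+        #      is_emulator=False -> 'marionette_desktop'.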
+ return '{}_{}'.format(testsuite, platform)
+
+ def download_and_extract(self):
+ super(MarionetteTest, self).download_and_extract()
+
+ if self.config.get('emulator'):
+ dirs = self.query_abs_dirs()
+
+ self.mkdir_p(dirs['abs_emulator_dir'])
+ tar = self.query_exe('tar', return_type='list')
+ self.run_command(tar + ['zxf', self.installer_path],
+ cwd=dirs['abs_emulator_dir'],
+ error_list=TarErrorList,
+ halt_on_failure=True, fatal_exit_code=3)
+
+ def install(self):
+ if self.config.get('emulator'):
+ self.info("Emulator tests; skipping.")
+ else:
+ super(MarionetteTest, self).install()
+
+ def run_tests(self):
+ """
+ Run the Marionette tests
+ """
+ dirs = self.query_abs_dirs()
+
+ raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
+ 'marionette_raw.log')
+ error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
+ 'marionette_errorsummary.log')
+ html_report_file = os.path.join(dirs['abs_blob_upload_dir'],
+ 'report.html')
+
+ config_fmt_args = {
+ # emulator builds require a longer timeout
+ 'timeout': 60000 if self.config.get('emulator') else 10000,
+ 'profile': os.path.join(dirs['abs_work_dir'], 'profile'),
+ 'xml_output': os.path.join(dirs['abs_work_dir'], 'output.xml'),
+ 'html_output': os.path.join(dirs['abs_blob_upload_dir'], 'output.html'),
+ 'logcat_dir': dirs['abs_work_dir'],
+ 'emulator': 'arm',
+ 'symbols_path': self.symbols_path,
+ 'binary': self.binary_path,
+ 'address': self.config.get('marionette_address'),
+ 'raw_log_file': raw_log_file,
+ 'error_summary_file': error_summary_file,
+ 'html_report_file': html_report_file,
+ 'gecko_log': dirs["abs_blob_upload_dir"],
+ 'this_chunk': self.config.get('this_chunk', 1),
+ 'total_chunks': self.config.get('total_chunks', 1)
+ }
+
+ self.info("The emulator type: %s" % config_fmt_args["emulator"])
+ # build the marionette command arguments
+ python = self.query_python_path('python')
+
+ cmd = [python, '-u', os.path.join(dirs['abs_marionette_dir'],
+ 'runtests.py')]
+
+ manifest = os.path.join(dirs['abs_marionette_tests_dir'],
+ self.config['test_manifest'])
+
+ if self.config.get('app_arg'):
+ config_fmt_args['app_arg'] = self.config['app_arg']
+
+ if not self.config['e10s']:
+ cmd.append('--disable-e10s')
+
+ cmd.append('--gecko-log=%s' % os.path.join(dirs["abs_blob_upload_dir"],
+ 'gecko.log'))
+
+ if self.config.get("structured_output"):
+ cmd.append("--log-raw=-")
+
+ options_group = self._get_options_group(self.config.get('emulator'))
+
+ if options_group not in self.config["suite_definitions"]:
+ self.fatal("%s is not defined in the config!" % options_group)
+
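+        # Each options entry is a printf-style template filled in from
+        # config_fmt_args, e.g. something like '--timeout=%(timeout)s'
+        # (illustrative; the real templates live in the suite config).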
+ for s in self.config["suite_definitions"][options_group]["options"]:
+ cmd.append(s % config_fmt_args)
+
+ if self.mkdir_p(dirs["abs_blob_upload_dir"]) == -1:
+ # Make sure that the logging directory exists
+ self.fatal("Could not create blobber upload directory")
+
+ cmd.append(manifest)
+
+ try_options, try_tests = self.try_args("marionette")
+ cmd.extend(self.query_tests_args(try_tests,
+ str_format_values=config_fmt_args))
+
+ env = {}
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
+ self.mkdir_p(env['MOZ_UPLOAD_DIR'])
+ env = self.query_env(partial_env=env)
+
+ marionette_parser = self.parser_class(config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list,
+ strict=False)
+ return_code = self.run_command(cmd, env=env,
+ output_timeout=1000,
+ output_parser=marionette_parser)
+ level = INFO
+ tbpl_status, log_level = marionette_parser.evaluate_parser(
+ return_code=return_code)
+ marionette_parser.append_tinderboxprint_line("marionette")
+
+ qemu = os.path.join(dirs['abs_work_dir'], 'qemu.log')
+ if os.path.isfile(qemu):
+ self.copyfile(qemu, os.path.join(dirs['abs_blob_upload_dir'],
+ 'qemu.log'))
+
+ # dump logcat output if there were failures
+ if self.config.get('emulator'):
+ if marionette_parser.failed != "0" or 'T-FAIL' in marionette_parser.tsummary:
+ logcat = os.path.join(dirs['abs_work_dir'], 'emulator-5554.log')
+ if os.access(logcat, os.F_OK):
+ self.info('dumping logcat')
+ self.run_command(['cat', logcat], error_list=LogcatErrorList)
+ else:
+ self.info('no logcat file found')
+ else:
+ # .. or gecko.log if it exists
+ gecko_log = os.path.join(self.config['base_work_dir'], 'gecko.log')
+ if os.access(gecko_log, os.F_OK):
+ self.info('dumping gecko.log')
+ self.run_command(['cat', gecko_log])
+ self.rmtree(gecko_log)
+ else:
+ self.info('gecko.log not found')
+
+ marionette_parser.print_summary('marionette')
+
+ self.log("Marionette exited with return code %s: %s" % (return_code, tbpl_status),
+ level=level)
+ self.buildbot_status(tbpl_status)
+
+
+if __name__ == '__main__':
+ marionetteTest = MarionetteTest()
+ marionetteTest.run_and_exit()
diff --git a/testing/mozharness/scripts/marionette_harness_tests.py b/testing/mozharness/scripts/marionette_harness_tests.py
new file mode 100644
index 000000000..0811bef9c
--- /dev/null
+++ b/testing/mozharness/scripts/marionette_harness_tests.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.python import PreScriptAction
+from mozharness.base.python import (
+ VirtualenvMixin,
+ virtualenv_config_options,
+)
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import (
+ BuildbotMixin, TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE,
+ TBPL_EXCEPTION
+)
+
+marionette_harness_tests_config_options = [
+ [['--tests'], {
+ 'dest': 'test_path',
+ 'default': None,
+ 'help': 'Path to test_*.py or directory relative to src root.',
+ }],
+ [['--src-dir'], {
+ 'dest': 'rel_src_dir',
+ 'default': None,
+ 'help': 'Path to hg.mo source checkout relative to work dir.',
+ }],
+
+] + copy.deepcopy(virtualenv_config_options)
+
+marionette_harness_tests_config = {
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ # relative to workspace
+ "rel_src_dir": os.path.join("build", "src"),
+}
+
+class MarionetteHarnessTests(VirtualenvMixin, BuildbotMixin, BaseScript):
+
+ def __init__(self, config_options=None,
+ all_actions=None, default_actions=None,
+ *args, **kwargs):
+ config_options = config_options or marionette_harness_tests_config_options
+ actions = [
+ 'clobber',
+ 'create-virtualenv',
+ 'run-tests',
+ ]
+ super(MarionetteHarnessTests, self).__init__(
+ config_options=config_options,
+ all_actions=all_actions or actions,
+ default_actions=default_actions or actions,
+ config=marionette_harness_tests_config,
+ *args, **kwargs)
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+ c = self.config
+ requirements = os.path.join(
+ dirs['abs_src_dir'],
+ 'testing', 'config',
+ 'marionette_harness_test_requirements.txt'
+ )
+ self.register_virtualenv_module(
+ requirements=[requirements],
+ two_pass=True
+ )
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ c = self.config
+ abs_dirs = super(MarionetteHarnessTests, self).query_abs_dirs()
+ dirs = {
+ 'abs_src_dir': os.path.abspath(
+ os.path.join(abs_dirs['base_work_dir'], c['rel_src_dir'])
+ ),
+ }
+
+ for key in dirs:
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def _get_pytest_status(self, code):
+ """
+ Translate pytest exit code to TH status
+
+ Based on https://github.com/pytest-dev/pytest/blob/master/_pytest/main.py#L21-L26
+ """
+ if code == 0:
+ return TBPL_SUCCESS
+ elif code == 1:
+ return TBPL_WARNING
+ elif 1 < code < 6:
+ self.error("pytest returned exit code: %s" % code)
+ return TBPL_FAILURE
+ else:
+ return TBPL_EXCEPTION
+
+ def run_tests(self):
+ """Run all the tests"""
+ dirs = self.query_abs_dirs()
+ test_relpath = self.config.get(
+ 'test_path',
+ os.path.join('testing', 'marionette',
+ 'harness', 'marionette_harness', 'tests',
+ 'harness_unit')
+ )
+ test_path = os.path.join(dirs['abs_src_dir'], test_relpath)
+ self.activate_virtualenv()
+ import pytest
+ command = ['-p', 'no:terminalreporter', # disable pytest logging
+ test_path]
+ logs = {}
+ for fmt in ['tbpl', 'mach', 'raw']:
+ logs[fmt] = os.path.join(dirs['abs_log_dir'],
+ 'mn-harness_{}.log'.format(fmt))
+ command.extend(['--log-'+fmt, logs[fmt]])
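+        # The --log-tbpl/--log-mach/--log-raw options are assumed to come
+        # from mozlog's pytest plugin, registered when mozlog is installed.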
+ self.info('Calling pytest.main with the following arguments: %s' % command)
+ status = self._get_pytest_status(pytest.main(command))
+ self.read_from_file(logs['tbpl'])
+ for log in logs.values():
+ self.copy_to_upload_dir(log, dest='logs/')
+ self.buildbot_status(status)
+
+
+if __name__ == '__main__':
+ script = MarionetteHarnessTests()
+ script.run_and_exit()
diff --git a/testing/mozharness/scripts/merge_day/gecko_migration.py b/testing/mozharness/scripts/merge_day/gecko_migration.py
new file mode 100755
index 000000000..7208630e0
--- /dev/null
+++ b/testing/mozharness/scripts/merge_day/gecko_migration.py
@@ -0,0 +1,545 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" gecko_migration.py
+
+Merge day script for gecko (mozilla-central -> mozilla-aurora,
+mozilla-aurora -> mozilla-beta, mozilla-beta -> mozilla-release).
+
+Ported largely from
+http://hg.mozilla.org/build/tools/file/084bc4e2fc76/release/beta2release.py
+and
+http://hg.mozilla.org/build/tools/file/084bc4e2fc76/release/merge_helper.py
+"""
+
+import os
+import pprint
+import subprocess
+import sys
+from getpass import getpass
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.errors import HgErrorList
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.selfserve import SelfServeMixin
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+
+VALID_MIGRATION_BEHAVIORS = (
+ "beta_to_release", "aurora_to_beta", "central_to_aurora", "release_to_esr",
+ "bump_second_digit",
+)
+
+
+# GeckoMigration {{{1
+class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
+ SelfServeMixin, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--balrog-api-root', ], {
+ "action": "store",
+ "dest": "balrog_api_root",
+ "type": "string",
+ "help": "Specify Balrog API root URL.",
+ }],
+ [['--balrog-username', ], {
+ "action": "store",
+ "dest": "balrog_username",
+ "type": "string",
+ "help": "Specify what user to connect to Balrog with.",
+ }],
+ [['--balrog-credentials-file', ], {
+ "action": "store",
+ "dest": "balrog_credentials_file",
+ "type": "string",
+ "help": "The file containing the Balrog credentials.",
+ }],
+ [['--remove-locale', ], {
+ "action": "extend",
+ "dest": "remove_locales",
+ "type": "string",
+ "help": "Comma separated list of locales to remove from the 'to' repo.",
+ }],
+ ]
+ gecko_repos = None
+
+ def __init__(self, require_config_file=True):
+ super(GeckoMigration, self).__init__(
+ config_options=virtualenv_config_options + self.config_options,
+ all_actions=[
+ 'clobber',
+ 'create-virtualenv',
+ 'clean-repos',
+ 'pull',
+ 'lock-update-paths',
+ 'migrate',
+ 'bump_second_digit',
+ 'commit-changes',
+ 'push',
+ 'trigger-builders',
+ ],
+ default_actions=[
+ 'clean-repos',
+ 'pull',
+ 'migrate',
+ ],
+ require_config_file=require_config_file
+ )
+ self.run_sanity_check()
+
+# Helper methods {{{1
+ def run_sanity_check(self):
+ """ Verify the configs look sane before proceeding.
+ """
+ message = ""
+ if self.config['migration_behavior'] not in VALID_MIGRATION_BEHAVIORS:
+ message += "%s must be one of %s!\n" % (self.config['migration_behavior'], VALID_MIGRATION_BEHAVIORS)
+ if self.config['migration_behavior'] == 'beta_to_release':
+ if self.config.get("require_remove_locales") and not self.config.get("remove_locales") and 'migrate' in self.actions:
+ message += "You must specify --remove-locale!\n"
+ else:
+ if self.config.get("require_remove_locales") or self.config.get("remove_locales"):
+ self.warning("--remove-locale isn't valid unless you're using beta_to_release migration_behavior!\n")
+ if message:
+ self.fatal(message)
+
+ def query_abs_dirs(self):
+ """ Allow for abs_from_dir and abs_to_dir
+ """
+ if self.abs_dirs:
+ return self.abs_dirs
+ dirs = super(GeckoMigration, self).query_abs_dirs()
+ self.abs_dirs['abs_tools_dir'] = os.path.join(
+ dirs['abs_work_dir'], 'tools'
+ )
+ self.abs_dirs['abs_tools_lib_dir'] = os.path.join(
+ dirs['abs_work_dir'], 'tools', 'lib', 'python'
+ )
+ for k in ('from', 'to'):
+ url = self.config.get("%s_repo_url" % k)
+ if url:
+ dir_name = self.get_filename_from_url(url)
+ self.info("adding %s" % dir_name)
+ self.abs_dirs['abs_%s_dir' % k] = os.path.join(
+ dirs['abs_work_dir'], dir_name
+ )
+ return self.abs_dirs
+
+ def query_repos(self):
+ """ Build a list of repos to clone.
+ """
+ if self.gecko_repos:
+ return self.gecko_repos
+ self.info("Building gecko_repos list...")
+ dirs = self.query_abs_dirs()
+ self.gecko_repos = []
+ for k in ('from', 'to'):
+ repo_key = "%s_repo_url" % k
+ url = self.config.get(repo_key)
+ if url:
+ self.gecko_repos.append({
+ "repo": url,
+ "branch": self.config.get("%s_repo_branch" % (k,), "default"),
+ "dest": dirs['abs_%s_dir' % k],
+ "vcs": "hg",
+ # "hg" vcs uses robustcheckout extension requires the use of a share
+ # but having a share breaks migration logic when merging repos.
+ # Solution: tell hg vcs to create a unique share directory for each
+ # gecko repo. see mozharness/base/vcs/mercurial.py for implementation
+ "use_vcs_unique_share": True,
+ })
+ else:
+ self.warning("Skipping %s" % repo_key)
+ self.info(pprint.pformat(self.gecko_repos))
+ return self.gecko_repos
+
+ def query_commit_dirs(self):
+ dirs = self.query_abs_dirs()
+ commit_dirs = [dirs['abs_to_dir']]
+ if self.config['migration_behavior'] == 'central_to_aurora':
+ commit_dirs.append(dirs['abs_from_dir'])
+ return commit_dirs
+
+ def query_commit_message(self):
+ return "Update configs. IGNORE BROKEN CHANGESETS CLOSED TREE NO BUG a=release ba=release"
+
+ def query_push_dirs(self):
+ dirs = self.query_abs_dirs()
+ return dirs.get('abs_from_dir'), dirs.get('abs_to_dir')
+
+ def query_push_args(self, cwd):
+ if cwd == self.query_abs_dirs()['abs_to_dir'] and \
+ self.config['migration_behavior'] == 'beta_to_release':
+ return ['--new-branch', '-r', '.']
+ else:
+ return ['-r', '.']
+
+ def query_from_revision(self):
+ """ Shortcut to get the revision for the from repo
+ """
+ dirs = self.query_abs_dirs()
+ return self.query_hg_revision(dirs['abs_from_dir'])
+
+ def query_to_revision(self):
+ """ Shortcut to get the revision for the to repo
+ """
+ dirs = self.query_abs_dirs()
+ return self.query_hg_revision(dirs['abs_to_dir'])
+
+ def hg_merge_via_debugsetparents(self, cwd, old_head, new_head,
+ preserve_tags=True, user=None):
+ """ Merge 2 heads avoiding non-fastforward commits
+ """
+ hg = self.query_exe('hg', return_type='list')
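+        # |hg debugsetparents| rewrites the working directory's parent
+        # pointers without touching any files, so the commit below records a
+        # merge of the two heads while keeping new_head's contents as-is.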
+ cmd = hg + ['debugsetparents', new_head, old_head]
+ self.run_command(cmd, cwd=cwd, error_list=HgErrorList,
+ halt_on_failure=True)
+ self.hg_commit(
+ cwd,
+ message="Merge old head via |hg debugsetparents %s %s|. "
+ "CLOSED TREE DONTBUILD a=release" % (new_head, old_head),
+ user=user
+ )
+ if preserve_tags:
+ # I don't know how to do this elegantly.
+ # I'm reverting .hgtags to old_head, then appending the new tags
+ # from new_head to .hgtags, and hoping nothing goes wrong.
+ # I'd rather not write patch files from scratch, so this seems
+ # like a slightly more complex but less objectionable method?
+ self.info("Trying to preserve tags from before debugsetparents...")
+ dirs = self.query_abs_dirs()
+ patch_file = os.path.join(dirs['abs_work_dir'], 'patch_file')
+ self.run_command(
+ subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags', '-U9', '>', patch_file]),
+ cwd=cwd,
+ )
+ self.run_command(
+ ['patch', '-R', '-p1', '-i', patch_file],
+ cwd=cwd,
+ halt_on_failure=True,
+ )
+ tag_diff = self.read_from_file(patch_file)
+ with self.opened(os.path.join(cwd, '.hgtags'), open_mode='a') as (fh, err):
+ if err:
+ self.fatal("Can't append to .hgtags!")
+ for n, line in enumerate(tag_diff.splitlines()):
+                # The leading lines of the diff are headers, so we skip them.
+ if n < 5:
+ continue
+                # Even after that, the only lines we care about are the
+                # additions: .hgtags was just reverted to old_head, so the
+                # '+' lines of the diff are exactly the tags added on
+                # new_head that need to be re-appended.
+ if not line.startswith('+'):
+ continue
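+                # .hgtags lines have the form "<40-hex changeset> <tag>",
+                # hence the length check below.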
+ line = line.replace('+', '')
+ (changeset, tag) = line.split(' ')
+ if len(changeset) != 40:
+ continue
+ fh.write("%s\n" % line)
+ out = self.get_output_from_command(['hg', 'status', '.hgtags'],
+ cwd=cwd)
+ if out:
+ self.hg_commit(
+ cwd,
+ message="Preserve old tags after debugsetparents. "
+ "CLOSED TREE DONTBUILD a=release",
+ user=user,
+ )
+ else:
+ self.info(".hgtags file is identical, no need to commit")
+
+ def remove_locales(self, file_name, locales):
+ """ Remove locales from shipped-locales (m-r only)
+ """
+ contents = self.read_from_file(file_name)
+ new_contents = ""
+ for line in contents.splitlines():
+ locale = line.split()[0]
+ if locale not in locales:
+ new_contents += "%s\n" % line
+ else:
+ self.info("Removed locale: %s" % locale)
+ self.write_to_file(file_name, new_contents)
+
+ def touch_clobber_file(self, cwd):
+ clobber_file = os.path.join(cwd, 'CLOBBER')
+ contents = self.read_from_file(clobber_file)
+ new_contents = ""
+ for line in contents.splitlines():
+ line = line.strip()
+ if line.startswith("#") or line == '':
+ new_contents += "%s\n" % line
+ new_contents += "Merge day clobber"
+ self.write_to_file(clobber_file, new_contents)
+
+ def bump_version(self, cwd, curr_version, next_version, curr_suffix,
+ next_suffix, bump_major=False):
+ """ Bump versions (m-c, m-a, m-b).
+
+ At some point we may want to unhardcode these filenames into config
+ """
+ curr_weave_version = str(int(curr_version) + 2)
+ next_weave_version = str(int(curr_weave_version) + 1)
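+        # e.g. curr_version "52" gives weave versions 1.54.0 -> 1.55.0,
+        # used in the bump_major block below.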
+ for f in self.config["version_files"]:
+ from_ = "%s.0%s" % (curr_version, curr_suffix)
+ to = "%s.0%s%s" % (next_version, next_suffix, f["suffix"])
+ self.replace(os.path.join(cwd, f["file"]), from_, to)
+
+ # only applicable for m-c
+ if bump_major:
+ self.replace(
+ os.path.join(cwd, "xpcom/components/Module.h"),
+ "static const unsigned int kVersion = %s;" % curr_version,
+ "static const unsigned int kVersion = %s;" % next_version
+ )
+ self.replace(
+ os.path.join(cwd, "services/sync/moz.build"),
+ "DEFINES['weave_version'] = '1.%s.0'" % curr_weave_version,
+ "DEFINES['weave_version'] = '1.%s.0'" % next_weave_version
+ )
+
+ # Branch-specific workflow helper methods {{{1
+ def central_to_aurora(self, end_tag):
+ """ mozilla-central -> mozilla-aurora behavior.
+
+ We could have all of these individually toggled by flags, but
+ by separating into workflow methods we can be more precise about
+ what happens in each workflow, while allowing for things like
+ staging beta user repo migrations.
+ """
+ dirs = self.query_abs_dirs()
+ self.info("Reverting locales")
+ hg = self.query_exe("hg", return_type="list")
+ for f in self.config["locale_files"]:
+ self.run_command(
+ hg + ["revert", "-r", end_tag, f],
+ cwd=dirs['abs_to_dir'],
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+ next_ma_version = self.get_version(dirs['abs_to_dir'])[0]
+ self.bump_version(dirs['abs_to_dir'], next_ma_version, next_ma_version, "a1", "a2")
+ self.apply_replacements()
+ # bump m-c version
+ curr_mc_version = self.get_version(dirs['abs_from_dir'])[0]
+ next_mc_version = str(int(curr_mc_version) + 1)
+ self.bump_version(
+ dirs['abs_from_dir'], curr_mc_version, next_mc_version, "a1", "a1",
+ bump_major=True
+ )
+ # touch clobber files
+ self.touch_clobber_file(dirs['abs_from_dir'])
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def aurora_to_beta(self, *args, **kwargs):
+ """ mozilla-aurora -> mozilla-beta behavior.
+
+ We could have all of these individually toggled by flags, but
+ by separating into workflow methods we can be more precise about
+ what happens in each workflow, while allowing for things like
+ staging beta user repo migrations.
+ """
+ dirs = self.query_abs_dirs()
+ mb_version = self.get_version(dirs['abs_to_dir'])[0]
+ self.bump_version(dirs['abs_to_dir'], mb_version, mb_version, "a2", "")
+ self.apply_replacements()
+ self.touch_clobber_file(dirs['abs_to_dir'])
+ # TODO mozconfig diffing
+ # The build/tools version only checks the mozconfigs from hgweb, so
+ # can't help pre-push. The in-tree mozconfig diffing requires a mach
+ # virtualenv to be installed. If we want this sooner we can put this
+ # in the push action; otherwise we may just wait until we have in-tree
+ # mozconfig checking.
+
+ def beta_to_release(self, *args, **kwargs):
+ """ mozilla-beta -> mozilla-release behavior.
+
+ We could have all of these individually toggled by flags, but
+ by separating into workflow methods we can be more precise about
+ what happens in each workflow, while allowing for things like
+ staging beta user repo migrations.
+ """
+ dirs = self.query_abs_dirs()
+ # Reset display_version.txt
+ for f in self.config["copy_files"]:
+ self.copyfile(
+ os.path.join(dirs['abs_to_dir'], f["src"]),
+ os.path.join(dirs['abs_to_dir'], f["dst"]))
+
+ self.apply_replacements()
+ if self.config.get("remove_locales"):
+ self.remove_locales(
+ os.path.join(dirs['abs_to_dir'], "browser/locales/shipped-locales"),
+ self.config['remove_locales']
+ )
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def release_to_esr(self, *args, **kwargs):
+ """ mozilla-release -> mozilla-esrNN behavior. """
+ dirs = self.query_abs_dirs()
+ for to_transplant in self.config.get("transplant_patches", []):
+ self.transplant(repo=to_transplant["repo"],
+ changeset=to_transplant["changeset"],
+ cwd=dirs['abs_to_dir'])
+ self.apply_replacements()
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def apply_replacements(self):
+ dirs = self.query_abs_dirs()
+ for f, from_, to in self.config["replacements"]:
+ self.replace(os.path.join(dirs['abs_to_dir'], f), from_, to)
+
+ def transplant(self, repo, changeset, cwd):
+ """Transplant a Mercurial changeset from a remote repository."""
+ hg = self.query_exe("hg", return_type="list")
+ cmd = hg + ["--config", "extensions.transplant=", "transplant",
+ "--source", repo, changeset]
+ self.info("Transplanting %s from %s" % (changeset, repo))
+ status = self.run_command(
+ cmd,
+ cwd=cwd,
+ error_list=HgErrorList,
+ )
+ if status != 0:
+ self.fatal("Cannot transplant %s from %s properly" %
+ (changeset, repo))
+
+ def pull_from_repo(self, from_dir, to_dir, revision=None, branch=None):
+ """ Pull from one repo to another. """
+ hg = self.query_exe("hg", return_type="list")
+ cmd = hg + ["pull"]
+ if revision:
+ cmd.extend(["-r", revision])
+ cmd.append(from_dir)
+ self.run_command(
+ cmd,
+ cwd=to_dir,
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+ cmd = hg + ["update", "-C"]
+ if branch or revision:
+ cmd.extend(["-r", branch or revision])
+ self.run_command(
+ cmd,
+ cwd=to_dir,
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+
+# Actions {{{1
+ def bump_second_digit(self, *args, **kwargs):
+ """Bump second digit.
+
+        ESR needs only the second digit bumped as part of merge day."""
+ dirs = self.query_abs_dirs()
+ version = self.get_version(dirs['abs_to_dir'])
+ curr_version = ".".join(version)
+ next_version = list(version)
+ # bump the second digit
+ next_version[1] = str(int(next_version[1]) + 1)
+        # Take major+minor and append '0' according to the Firefox version
+        # schema: 52.0 will become 52.1.0, not 52.1.
+ next_version = ".".join(next_version[:2] + ['0'])
+ for f in self.config["version_files"]:
+ self.replace(os.path.join(dirs['abs_to_dir'], f["file"]),
+ curr_version, next_version + f["suffix"])
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def pull(self):
+ """ Pull tools first, then clone the gecko repos
+ """
+ repos = [{
+ "repo": self.config["tools_repo_url"],
+ "branch": self.config["tools_repo_branch"],
+ "dest": "tools",
+ "vcs": "hg",
+ }] + self.query_repos()
+ super(GeckoMigration, self).pull(repos=repos)
+
+ def lock_update_paths(self):
+ self.lock_balrog_rules(self.config["balrog_rules_to_lock"])
+
+ def migrate(self):
+ """ Perform the migration.
+ """
+ dirs = self.query_abs_dirs()
+ from_fx_major_version = self.get_version(dirs['abs_from_dir'])[0]
+ to_fx_major_version = self.get_version(dirs['abs_to_dir'])[0]
+ base_from_rev = self.query_from_revision()
+ base_to_rev = self.query_to_revision()
+ base_tag = self.config['base_tag'] % {'major_version': from_fx_major_version}
+ end_tag = self.config['end_tag'] % {'major_version': to_fx_major_version}
+ self.hg_tag(
+ dirs['abs_from_dir'], base_tag, user=self.config['hg_user'],
+ revision=base_from_rev,
+ )
+ new_from_rev = self.query_from_revision()
+ self.info("New revision %s" % new_from_rev)
+ pull_revision = None
+ if not self.config.get("pull_all_branches"):
+ pull_revision = new_from_rev
+ self.pull_from_repo(
+ dirs['abs_from_dir'], dirs['abs_to_dir'],
+ revision=pull_revision,
+ branch="default",
+ )
+ if self.config.get("requires_head_merge") is not False:
+ self.hg_merge_via_debugsetparents(
+ dirs['abs_to_dir'], old_head=base_to_rev, new_head=new_from_rev,
+ user=self.config['hg_user'],
+ )
+ self.hg_tag(
+ dirs['abs_to_dir'], end_tag, user=self.config['hg_user'],
+ revision=base_to_rev, force=True,
+ )
+ # Call beta_to_release etc.
+ if not hasattr(self, self.config['migration_behavior']):
+ self.fatal("Don't know how to proceed with migration_behavior %s !" % self.config['migration_behavior'])
+ getattr(self, self.config['migration_behavior'])(end_tag=end_tag)
+ self.info("Verify the diff, and apply any manual changes, such as disabling features, and --commit-changes")
+
+ def trigger_builders(self):
+ """Triggers builders that should be run directly after a merge.
+ There are two different types of things we trigger:
+ 1) Nightly builds ("post_merge_nightly_branches" in the config).
+ These are triggered with buildapi's nightly build endpoint to avoid
+ duplicating all of the nightly builder names into the gecko
+ migration mozharness configs. (Which would surely get out of date
+ very quickly).
+ 2) Arbitrary builders ("post_merge_builders"). These are additional
+ builders to trigger that aren't part of the nightly builder set.
+ Previous example: hg bundle generation builders.
+ """
+ dirs = self.query_abs_dirs()
+ branch = self.config["to_repo_url"].rstrip("/").split("/")[-1]
+ revision = self.query_to_revision()
+ # Horrible hack because our internal buildapi interface doesn't let us
+ # actually do anything. Need to use the public one w/ auth.
+ username = raw_input("LDAP Username: ")
+ password = getpass(prompt="LDAP Password: ")
+ auth = (username, password)
+ for builder in self.config["post_merge_builders"]:
+ self.trigger_arbitrary_job(builder, branch, revision, auth)
+ for nightly_branch in self.config["post_merge_nightly_branches"]:
+ nightly_revision = self.query_hg_revision(os.path.join(dirs["abs_work_dir"], nightly_branch))
+ self.trigger_nightly_builds(nightly_branch, nightly_revision, auth)
+
+# __main__ {{{1
+if __name__ == '__main__':
+ GeckoMigration().run_and_exit()
diff --git a/testing/mozharness/scripts/mobile_l10n.py b/testing/mozharness/scripts/mobile_l10n.py
new file mode 100755
index 000000000..cbac6fa67
--- /dev/null
+++ b/testing/mozharness/scripts/mobile_l10n.py
@@ -0,0 +1,714 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""mobile_l10n.py
+
+This currently supports nightly and release single locale repacks for
+Android. This also creates nightly updates.
+"""
+
+from copy import deepcopy
+import os
+import re
+import subprocess
+import sys
+import time
+import shlex
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import BaseErrorList, MakefileErrorList
+from mozharness.base.log import OutputParser
+from mozharness.base.transfer import TransferMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.mozilla.signing import MobileSigningMixin
+from mozharness.mozilla.tooltool import TooltoolMixin
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.l10n.locales import LocalesMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.base.python import VirtualenvMixin
+from mozharness.mozilla.taskcluster_helper import Taskcluster
+
+
+# MobileSingleLocale {{{1
+class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
+ MobileSigningMixin, TransferMixin, TooltoolMixin,
+ BuildbotMixin, PurgeMixin, MercurialScript, BalrogMixin,
+ VirtualenvMixin):
+ config_options = [[
+ ['--locale', ],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to sign and update"
+ }
+ ], [
+ ['--locales-file', ],
+ {"action": "store",
+ "dest": "locales_file",
+ "type": "string",
+ "help": "Specify a file to determine which locales to sign and update"
+ }
+ ], [
+ ['--tag-override', ],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"
+ }
+ ], [
+ ['--user-repo-override', ],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"
+ }
+ ], [
+ ['--release-config-file', ],
+ {"action": "store",
+ "dest": "release_config_file",
+ "type": "string",
+ "help": "Specify the release config file to use"
+ }
+ ], [
+ ['--key-alias', ],
+ {"action": "store",
+ "dest": "key_alias",
+ "type": "choice",
+ "default": "nightly",
+ "choices": ["nightly", "release"],
+ "help": "Specify the signing key alias"
+ }
+ ], [
+ ['--this-chunk', ],
+ {"action": "store",
+ "dest": "this_locale_chunk",
+ "type": "int",
+ "help": "Specify which chunk of locales to run"
+ }
+ ], [
+ ['--total-chunks', ],
+ {"action": "store",
+ "dest": "total_locale_chunks",
+ "type": "int",
+ "help": "Specify the total number of chunks of locales"
+ }
+ ], [
+ ["--disable-mock"],
+ {"dest": "disable_mock",
+ "action": "store_true",
+ "help": "do not run under mock despite what gecko-config says",
+ }
+ ], [
+ ['--revision', ],
+ {"action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "Override the gecko revision to use (otherwise use buildbot supplied"
+ " value, or en-US revision) "}
+ ]]
+
+ def __init__(self, require_config_file=True):
+ buildscript_kwargs = {
+ 'all_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "validate-repacks-signed",
+ "upload-repacks",
+ "create-virtualenv",
+ "taskcluster-upload",
+ "submit-to-balrog",
+ "summary",
+ ],
+ 'config': {
+ 'taskcluster_credentials_file': 'oauth.txt',
+ 'virtualenv_modules': [
+ 'requests==2.8.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.26',
+ ],
+ 'virtualenv_path': 'venv',
+ },
+ }
+ LocalesMixin.__init__(self)
+ MercurialScript.__init__(
+ self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ **buildscript_kwargs
+ )
+ self.base_package_name = None
+ self.buildid = None
+ self.make_ident_output = None
+ self.repack_env = None
+ self.revision = None
+ self.upload_env = None
+ self.version = None
+ self.upload_urls = {}
+ self.locales_property = {}
+
+ # Helper methods {{{2
+ def query_repack_env(self):
+ if self.repack_env:
+ return self.repack_env
+ c = self.config
+ replace_dict = {}
+ if c.get('release_config_file'):
+ rc = self.query_release_config()
+ replace_dict = {
+ 'version': rc['version'],
+ 'buildnum': rc['buildnum']
+ }
+ repack_env = self.query_env(partial_env=c.get("repack_env"),
+ replace_dict=replace_dict)
+ if c.get('base_en_us_binary_url') and c.get('release_config_file'):
+ rc = self.query_release_config()
+ repack_env['EN_US_BINARY_URL'] = c['base_en_us_binary_url'] % replace_dict
+ if 'MOZ_SIGNING_SERVERS' in os.environ:
+ repack_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
+ self.repack_env = repack_env
+ return self.repack_env
+
+ def query_l10n_env(self):
+ return self.query_env()
+
+ def query_upload_env(self):
+ if self.upload_env:
+ return self.upload_env
+ c = self.config
+ replace_dict = {
+ 'buildid': self.query_buildid(),
+ 'version': self.query_version(),
+ }
+ replace_dict.update(c)
+
+ # Android l10n builds use a non-standard location for l10n files. Other
+ # builds go to 'mozilla-central-l10n', while android builds add part of
+ # the platform name as well, like 'mozilla-central-android-api-15-l10n'.
+ # So we override the branch with something that contains the platform
+ # name.
+ replace_dict['branch'] = c['upload_branch']
+ replace_dict['post_upload_extra'] = ' '.join(c.get('post_upload_extra', []))
+
+ upload_env = self.query_env(partial_env=c.get("upload_env"),
+ replace_dict=replace_dict)
+ if 'MOZ_SIGNING_SERVERS' in os.environ:
+ upload_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd())
+ if self.query_is_release_or_beta():
+ upload_env['MOZ_PKG_VERSION'] = '%(version)s' % replace_dict
+ self.upload_env = upload_env
+ return self.upload_env
+
+ def _query_make_ident_output(self):
+ """Get |make ident| output from the objdir.
+ Only valid after setup is run.
+ """
+ if self.make_ident_output:
+ return self.make_ident_output
+ env = self.query_repack_env()
+ dirs = self.query_abs_dirs()
+ output = self.get_output_from_command_m(["make", "ident"],
+ cwd=dirs['abs_locales_dir'],
+ env=env,
+ silent=True,
+ halt_on_failure=True)
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=MakefileErrorList)
+ parser.add_lines(output)
+ self.make_ident_output = output
+ return output
+
+ def query_buildid(self):
+ """Get buildid from the objdir.
+ Only valid after setup is run.
+ """
+ if self.buildid:
+ return self.buildid
+ r = re.compile("buildid (\d+)")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ m = r.match(line)
+ if m:
+ self.buildid = m.groups()[0]
+ return self.buildid
+
+ def query_revision(self):
+ """Get revision from the objdir.
+ Only valid after setup is run.
+ """
+ if self.revision:
+ return self.revision
+ r = re.compile(r"gecko_revision ([0-9a-f]+\+?)")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ m = r.match(line)
+ if m:
+ self.revision = m.groups()[0]
+ return self.revision
+
+ def _query_make_variable(self, variable, make_args=None):
+ make = self.query_exe('make')
+ env = self.query_repack_env()
+ dirs = self.query_abs_dirs()
+ if make_args is None:
+ make_args = []
+ # TODO error checking
+ output = self.get_output_from_command_m(
+ [make, "echo-variable-%s" % variable] + make_args,
+ cwd=dirs['abs_locales_dir'], silent=True,
+ env=env
+ )
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=MakefileErrorList)
+ parser.add_lines(output)
+ return output.strip()
+
+ def query_base_package_name(self):
+ """Get the package name from the objdir.
+ Only valid after setup is run.
+ """
+ if self.base_package_name:
+ return self.base_package_name
+ self.base_package_name = self._query_make_variable(
+ "PACKAGE",
+ make_args=['AB_CD=%(locale)s']
+ )
+ return self.base_package_name
+
+ def query_version(self):
+ """Get the package name from the objdir.
+ Only valid after setup is run.
+ """
+ if self.version:
+ return self.version
+ c = self.config
+ if c.get('release_config_file'):
+ rc = self.query_release_config()
+ self.version = rc['version']
+ else:
+ self.version = self._query_make_variable("MOZ_APP_VERSION")
+ return self.version
+
+ def query_upload_url(self, locale):
+ if locale in self.upload_urls:
+ return self.upload_urls[locale]
+ else:
+ self.error("Can't determine the upload url for %s!" % locale)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
+
+ dirs = {
+ 'abs_tools_dir':
+ os.path.join(abs_dirs['base_work_dir'], 'tools'),
+ 'build_dir':
+ os.path.join(abs_dirs['base_work_dir'], 'build'),
+ }
+
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def add_failure(self, locale, message, **kwargs):
+ self.locales_property[locale] = "Failed"
+ prop_key = "%s_failure" % locale
+ prop_value = self.query_buildbot_property(prop_key)
+ if prop_value:
+ prop_value = "%s %s" % (prop_value, message)
+ else:
+ prop_value = message
+ self.set_buildbot_property(prop_key, prop_value, write_to_file=True)
+ MercurialScript.add_failure(self, locale, message=message, **kwargs)
+
+ def summary(self):
+ MercurialScript.summary(self)
+ # TODO we probably want to make this configurable on/off
+ locales = self.query_locales()
+ for locale in locales:
+ self.locales_property.setdefault(locale, "Success")
+ self.set_buildbot_property("locales", json.dumps(self.locales_property), write_to_file=True)
+
+ # Actions {{{2
+ def clobber(self):
+ self.read_buildbot_config()
+ dirs = self.query_abs_dirs()
+ c = self.config
+ objdir = os.path.join(dirs['abs_work_dir'], c['mozilla_dir'],
+ c['objdir'])
+ super(MobileSingleLocale, self).clobber(always_clobber_dirs=[objdir])
+
+ def pull(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ repos = []
+ replace_dict = {}
+ if c.get("user_repo_override"):
+ replace_dict['user_repo_override'] = c['user_repo_override']
+ # deepcopy() needed because of self.config lock bug :(
+ for repo_dict in deepcopy(c['repos']):
+ repo_dict['repo'] = repo_dict['repo'] % replace_dict
+ repos.append(repo_dict)
+ else:
+ repos = c['repos']
+ self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
+ tag_override=c.get('tag_override'))
+
+ def clone_locales(self):
+ self.pull_locale_source()
+
+ # list_locales() is defined in LocalesMixin.
+
+ def _setup_configure(self, buildid=None):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ env = self.query_repack_env()
+ make = self.query_exe("make")
+ if self.run_command_m([make, "-f", "client.mk", "configure"],
+ cwd=dirs['abs_mozilla_dir'],
+ env=env,
+ error_list=MakefileErrorList):
+ self.fatal("Configure failed!")
+
+ # Run 'make export' in objdir/config to get nsinstall
+ self.run_command_m([make, 'export'],
+ cwd=os.path.join(dirs['abs_objdir'], 'config'),
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+
+ # Run 'make buildid.h' in objdir/ to get the buildid.h file
+ cmd = [make, 'buildid.h']
+ if buildid:
+ cmd.append('MOZ_BUILD_DATE=%s' % str(buildid))
+ self.run_command_m(cmd,
+ cwd=dirs['abs_objdir'],
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+
+ def setup(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ mozconfig_path = os.path.join(dirs['abs_mozilla_dir'], '.mozconfig')
+ self.copyfile(os.path.join(dirs['abs_work_dir'], c['mozconfig']),
+ mozconfig_path)
+ # TODO stop using cat
+ cat = self.query_exe("cat")
+ make = self.query_exe("make")
+ self.run_command_m([cat, mozconfig_path])
+ env = self.query_repack_env()
+ if self.config.get("tooltool_config"):
+ self.tooltool_fetch(
+ self.config['tooltool_config']['manifest'],
+ output_dir=self.config['tooltool_config']['output_dir'] % self.query_abs_dirs(),
+ )
+ self._setup_configure()
+ self.run_command_m([make, "wget-en-US"],
+ cwd=dirs['abs_locales_dir'],
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+ self.run_command_m([make, "unpack"],
+ cwd=dirs['abs_locales_dir'],
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+
+ # on try we want the source we already have, otherwise update to the
+ # same as the en-US binary
+ if self.config.get("update_gecko_source_to_enUS", True):
+ revision = self.query_revision()
+ if not revision:
+ self.fatal("Can't determine revision!")
+ hg = self.query_exe("hg")
+ # TODO do this through VCSMixin instead of hardcoding hg
+ self.run_command_m([hg, "update", "-r", revision],
+ cwd=dirs["abs_mozilla_dir"],
+ env=env,
+ error_list=BaseErrorList,
+ halt_on_failure=True)
+ self.set_buildbot_property('revision', revision, write_to_file=True)
+ # Configure again since the hg update may have invalidated it.
+ buildid = self.query_buildid()
+ self._setup_configure(buildid=buildid)
+
+ def repack(self):
+ # TODO per-locale logs and reporting.
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ make = self.query_exe("make")
+ repack_env = self.query_repack_env()
+ success_count = total_count = 0
+ for locale in locales:
+ total_count += 1
+ self.enable_mock()
+ result = self.run_compare_locales(locale)
+ self.disable_mock()
+ if result:
+ self.add_failure(locale, message="%s failed in compare-locales!" % locale)
+ continue
+ if self.run_command_m([make, "installers-%s" % locale],
+ cwd=dirs['abs_locales_dir'],
+ env=repack_env,
+ error_list=MakefileErrorList,
+ halt_on_failure=False):
+ self.add_failure(locale, message="%s failed in make installers-%s!" % (locale, locale))
+ continue
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Repacked %d of %d binaries successfully.")
+
+ def validate_repacks_signed(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ base_package_name = self.query_base_package_name()
+ base_package_dir = os.path.join(dirs['abs_objdir'], 'dist')
+ repack_env = self.query_repack_env()
+ success_count = total_count = 0
+ for locale in locales:
+ total_count += 1
+ signed_path = os.path.join(base_package_dir,
+ base_package_name % {'locale': locale})
+ # We need to wrap what this function does with mock, since
+ # MobileSigningMixin doesn't know about mock
+ self.enable_mock()
+ status = self.verify_android_signature(
+ signed_path,
+ script=c['signature_verification_script'],
+ env=repack_env,
+ key_alias=c['key_alias'],
+ )
+ self.disable_mock()
+ if status:
+ self.add_failure(locale, message="Errors verifying %s binary!" % locale)
+ # No need to rm because upload is per-locale
+ continue
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Validated signatures on %d of %d binaries successfully.")
+
+ def taskcluster_upload(self):
+ auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
+ credentials = {}
+ execfile(auth, credentials)
+ client_id = credentials.get('taskcluster_clientId')
+ access_token = credentials.get('taskcluster_accessToken')
+ if not client_id or not access_token:
+ self.warning('Skipping S3 file upload: No taskcluster credentials.')
+ return
+
+ self.activate_virtualenv()
+
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ make = self.query_exe("make")
+ upload_env = self.query_upload_env()
+ cwd = dirs['abs_locales_dir']
+ branch = self.config['branch']
+ revision = self.query_revision()
+ repo = self.query_l10n_repo()
+ pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hg')
+ pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))
+ routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
+ 'testing/mozharness/configs/routes.json')
+ with open(routes_json) as routes_file:
+ contents = json.load(routes_file)
+ templates = contents['l10n']
+
+ for locale in locales:
+ output = self.get_output_from_command_m(
+ "%s echo-variable-UPLOAD_FILES AB_CD=%s" % (make, locale),
+ cwd=cwd,
+ env=upload_env,
+ )
+ files = shlex.split(output)
+ abs_files = [os.path.abspath(os.path.join(cwd, f)) for f in files]
+
+ routes = []
+ fmt = {
+ 'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
+ 'project': branch,
+ 'head_rev': revision,
+ 'pushdate': pushdate,
+ 'year': pushdate[0:4],
+ 'month': pushdate[4:6],
+ 'day': pushdate[6:8],
+ 'build_product': self.config['stage_product'],
+ 'build_name': self.query_build_name(),
+ 'build_type': self.query_build_type(),
+ 'locale': locale,
+ }
+ for template in templates:
+ routes.append(template.format(**fmt))
+
+ self.info('Using routes: %s' % routes)
+ tc = Taskcluster(branch,
+ pushinfo.pushdate, # Use pushdate as the rank
+ client_id,
+ access_token,
+ self.log_obj,
+ )
+ task = tc.create_task(routes)
+ tc.claim_task(task)
+
+ for upload_file in abs_files:
+ tc.create_artifact(task, upload_file)
+ tc.report_completed(task)
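+        # Illustrative note (not in the original script): the actual templates
+        # live in testing/mozharness/configs/routes.json; a hypothetical l10n
+        # entry could look like
+        # "{index}.gecko.v2.{project}.revision.{head_rev}.{build_product}-l10n.{build_name}-{build_type}.{locale}",
+        # which fmt above expands into a concrete index route.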
+
+ def upload_repacks(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ make = self.query_exe("make")
+ base_package_name = self.query_base_package_name()
+ version = self.query_version()
+ upload_env = self.query_upload_env()
+ success_count = total_count = 0
+ buildnum = None
+ if c.get('release_config_file'):
+ rc = self.query_release_config()
+ buildnum = rc['buildnum']
+ for locale in locales:
+ if self.query_failure(locale):
+ self.warning("Skipping previously failed locale %s." % locale)
+ continue
+ total_count += 1
+ if c.get('base_post_upload_cmd'):
+                upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % {
+                    'version': version,
+                    'locale': locale,
+                    'buildnum': str(buildnum),
+                    'post_upload_extra': ' '.join(c.get('post_upload_extra', [])),
+                }
+ output = self.get_output_from_command_m(
+                # Ugly hack to keep |make upload| stderr from showing up
+                # as get_output_from_command errors
+ "%s upload AB_CD=%s 2>&1" % (make, locale),
+ cwd=dirs['abs_locales_dir'],
+ env=upload_env,
+ silent=True
+ )
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=MakefileErrorList)
+ parser.add_lines(output)
+ if parser.num_errors:
+ self.add_failure(locale, message="%s failed in make upload!" % (locale))
+ continue
+ package_name = base_package_name % {'locale': locale}
+ r = re.compile("(http.*%s)" % package_name)
+ for line in output.splitlines():
+ m = r.match(line)
+ if m:
+ self.upload_urls[locale] = m.groups()[0]
+ self.info("Found upload url %s" % self.upload_urls[locale])
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Make Upload for %d of %d locales successful.")
+
+ def checkout_tools(self):
+ dirs = self.query_abs_dirs()
+
+ # We need hg.m.o/build/tools checked out
+ self.info("Checking out tools")
+ repos = [{
+ 'repo': self.config['tools_repo'],
+ 'vcs': "hg",
+ 'branch': "default",
+ 'dest': dirs['abs_tools_dir'],
+ }]
+ rev = self.vcs_checkout(**repos[0])
+ self.set_buildbot_property("tools_revision", rev, write_to_file=True)
+
+    def query_apkfile_path(self, locale):
+ dirs = self.query_abs_dirs()
+ apkdir = os.path.join(dirs['abs_objdir'], 'dist')
+ r = r"(\.)" + re.escape(locale) + r"(\.*)"
+
+ apks = []
+ for f in os.listdir(apkdir):
+ if f.endswith(".apk") and re.search(r, f):
+ apks.append(f)
+ if len(apks) == 0:
+ self.fatal("Found no apks files in %s, don't know what to do:\n%s" % (apkdir, apks), exit_code=1)
+
+ return os.path.join(apkdir, apks[0])
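+        # Illustrative note (not in the original script): for locale "de" the
+        # pattern matches any apk name containing ".de", e.g. a hypothetical
+        # dist/fennec-53.0.de.android-arm.apk.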
+
+    def query_is_release_or_beta(self):
+ return bool(self.config.get("is_release_or_beta"))
+
+    def submit_to_balrog(self):
+ if not self.query_is_nightly() and not self.query_is_release_or_beta():
+ self.info("Not a nightly or release build, skipping balrog submission.")
+ return
+
+ if not self.config.get("balrog_servers"):
+ self.info("balrog_servers not set; skipping balrog submission.")
+ return
+
+ self.checkout_tools()
+
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ balrogReady = True
+ for locale in locales:
+ apk_url = self.query_upload_url(locale)
+ if not apk_url:
+ self.add_failure(locale, message="Failed to detect %s url in make upload!" % (locale))
+ balrogReady = False
+ continue
+ if not balrogReady:
+            return self.fatal(message="Not all repacks were successful; aborting without submitting to Balrog")
+
+ for locale in locales:
+ apkfile = self.query_apkfile_path(locale)
+ apk_url = self.query_upload_url(locale)
+
+            # Set other necessary properties for Balrog submission. None need
+            # to be passed back to buildbot, so we won't write them to the
+            # properties files.
+ self.set_buildbot_property("locale", locale)
+
+ self.set_buildbot_property("appVersion", self.query_version())
+ # The Balrog submitter translates this platform into a build target
+ # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
+ self.set_buildbot_property("platform", self.buildbot_config["properties"]["platform"])
+            # TODO: Is there a better way to get this?
+
+ self.set_buildbot_property("appName", "Fennec")
+ # TODO: don't hardcode
+ self.set_buildbot_property("hashType", "sha512")
+ self.set_buildbot_property("completeMarSize", self.query_filesize(apkfile))
+ self.set_buildbot_property("completeMarHash", self.query_sha512sum(apkfile))
+ self.set_buildbot_property("completeMarUrl", apk_url)
+ self.set_buildbot_property("isOSUpdate", False)
+ self.set_buildbot_property("buildid", self.query_buildid())
+
+ if self.query_is_nightly():
+ self.submit_balrog_updates(release_type="nightly")
+ else:
+ self.submit_balrog_updates(release_type="release")
+ if not self.query_is_nightly():
+ self.submit_balrog_release_pusher(dirs)
+
+# main {{{1
+if __name__ == '__main__':
+ single_locale = MobileSingleLocale()
+ single_locale.run_and_exit()
diff --git a/testing/mozharness/scripts/mobile_partner_repack.py b/testing/mozharness/scripts/mobile_partner_repack.py
new file mode 100755
index 000000000..8d99f825a
--- /dev/null
+++ b/testing/mozharness/scripts/mobile_partner_repack.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""mobile_partner_repack.py
+
+"""
+
+from copy import deepcopy
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import ZipErrorList
+from mozharness.base.log import FATAL
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.l10n.locales import LocalesMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.mozilla.signing import MobileSigningMixin
+
+SUPPORTED_PLATFORMS = ["android"]
+
+
+# MobilePartnerRepack {{{1
+class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
+ TransferMixin, MercurialScript):
+ config_options = [[
+ ['--locale', ],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to repack"
+ }
+ ], [
+ ['--partner', ],
+ {"action": "extend",
+ "dest": "partners",
+ "type": "string",
+ "help": "Specify the partner(s) to repack"
+ }
+ ], [
+ ['--locales-file', ],
+ {"action": "store",
+ "dest": "locales_file",
+ "type": "string",
+ "help": "Specify a json file to determine which locales to repack"
+ }
+ ], [
+ ['--tag-override', ],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"
+ }
+ ], [
+ ['--platform', ],
+ {"action": "extend",
+ "dest": "platforms",
+ "type": "choice",
+ "choices": SUPPORTED_PLATFORMS,
+ "help": "Specify the platform(s) to repack"
+ }
+ ], [
+ ['--user-repo-override', ],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"
+ }
+ ], [
+ ['--release-config-file', ],
+ {"action": "store",
+ "dest": "release_config_file",
+ "type": "string",
+ "help": "Specify the release config file to use"
+ }
+ ], [
+ ['--version', ],
+ {"action": "store",
+ "dest": "version",
+ "type": "string",
+ "help": "Specify the current version"
+ }
+ ], [
+ ['--buildnum', ],
+ {"action": "store",
+ "dest": "buildnum",
+ "type": "int",
+ "default": 1,
+ "metavar": "INT",
+ "help": "Specify the current release build num (e.g. build1, build2)"
+ }
+ ]]
+
+ def __init__(self, require_config_file=True):
+ self.release_config = {}
+ LocalesMixin.__init__(self)
+ MercurialScript.__init__(
+ self,
+ config_options=self.config_options,
+ all_actions=[
+ "passphrase",
+ "clobber",
+ "pull",
+ "download",
+ "repack",
+ "upload-unsigned-bits",
+ "sign",
+ "upload-signed-bits",
+ "summary",
+ ],
+ require_config_file=require_config_file
+ )
+
+ # Helper methods {{{2
+ def add_failure(self, platform, locale, **kwargs):
+ s = "%s:%s" % (platform, locale)
+ if 'message' in kwargs:
+ kwargs['message'] = kwargs['message'] % {'platform': platform, 'locale': locale}
+ super(MobilePartnerRepack, self).add_failure(s, **kwargs)
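+        # Illustrative note (not in the original script): a call like
+        # add_failure("android", "de", message="Unable to repack %(platform)s:%(locale)s installer!")
+        # records the failure under the key "android:de" with the message
+        # "Unable to repack android:de installer!".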
+
+ def query_failure(self, platform, locale):
+ s = "%s:%s" % (platform, locale)
+ return super(MobilePartnerRepack, self).query_failure(s)
+
+ # Actions {{{2
+
+ def pull(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ repos = []
+ replace_dict = {}
+ if c.get("user_repo_override"):
+ replace_dict['user_repo_override'] = c['user_repo_override']
+ # deepcopy() needed because of self.config lock bug :(
+ for repo_dict in deepcopy(c['repos']):
+ repo_dict['repo'] = repo_dict['repo'] % replace_dict
+ repos.append(repo_dict)
+ else:
+ repos = c['repos']
+ self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
+ tag_override=c.get('tag_override'))
+
+ def download(self):
+ c = self.config
+ rc = self.query_release_config()
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ replace_dict = {
+ 'buildnum': rc['buildnum'],
+ 'version': rc['version'],
+ }
+ success_count = total_count = 0
+ for platform in c['platforms']:
+ base_installer_name = c['installer_base_names'][platform]
+ base_url = c['download_base_url'] + '/' + \
+ c['download_unsigned_base_subdir'] + '/' + \
+ base_installer_name
+ replace_dict['platform'] = platform
+ for locale in locales:
+ replace_dict['locale'] = locale
+ url = base_url % replace_dict
+ installer_name = base_installer_name % replace_dict
+ parent_dir = '%s/original/%s/%s' % (dirs['abs_work_dir'],
+ platform, locale)
+ file_path = '%s/%s' % (parent_dir, installer_name)
+ self.mkdir_p(parent_dir)
+ total_count += 1
+ if not self.download_file(url, file_path):
+ self.add_failure(platform, locale,
+ message="Unable to download %(platform)s:%(locale)s installer!")
+ else:
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Downloaded %d of %d installers successfully.")
+
+ def _repack_apk(self, partner, orig_path, repack_path):
+ """ Repack the apk with a partner update channel.
+ Returns True for success, None for failure
+ """
+ dirs = self.query_abs_dirs()
+ zip_bin = self.query_exe("zip")
+ unzip_bin = self.query_exe("unzip")
+ file_name = os.path.basename(orig_path)
+ tmp_dir = os.path.join(dirs['abs_work_dir'], 'tmp')
+ tmp_file = os.path.join(tmp_dir, file_name)
+ tmp_prefs_dir = os.path.join(tmp_dir, 'defaults', 'pref')
+ # Error checking for each step.
+ # Ignoring the mkdir_p()s since the subsequent copyfile()s will
+ # error out if unsuccessful.
+ if self.rmtree(tmp_dir):
+ return
+ self.mkdir_p(tmp_prefs_dir)
+ if self.copyfile(orig_path, tmp_file):
+ return
+ if self.write_to_file(os.path.join(tmp_prefs_dir, 'partner.js'),
+ 'pref("app.partner.%s", "%s");' % (partner, partner)
+ ) is None:
+ return
+ if self.run_command([unzip_bin, '-q', file_name, 'omni.ja'],
+ error_list=ZipErrorList,
+ return_type='num_errors',
+ cwd=tmp_dir):
+ self.error("Can't extract omni.ja from %s!" % file_name)
+ return
+ if self.run_command([zip_bin, '-9r', 'omni.ja',
+ 'defaults/pref/partner.js'],
+ error_list=ZipErrorList,
+ return_type='num_errors',
+ cwd=tmp_dir):
+ self.error("Can't add partner.js to omni.ja!")
+ return
+ if self.run_command([zip_bin, '-9r', file_name, 'omni.ja'],
+ error_list=ZipErrorList,
+ return_type='num_errors',
+ cwd=tmp_dir):
+ self.error("Can't re-add omni.ja to %s!" % file_name)
+ return
+ if self.unsign_apk(tmp_file):
+ return
+ repack_dir = os.path.dirname(repack_path)
+ self.mkdir_p(repack_dir)
+ if self.copyfile(tmp_file, repack_path):
+ return
+ return True
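+        # Illustrative note (not in the original script): for a hypothetical
+        # partner "acme", the repacked apk's omni.ja gains a
+        # defaults/pref/partner.js containing:
+        #   pref("app.partner.acme", "acme");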
+
+ def repack(self):
+ c = self.config
+ rc = self.query_release_config()
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ success_count = total_count = 0
+ for platform in c['platforms']:
+ for locale in locales:
+ installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
+ if self.query_failure(platform, locale):
+ self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
+ continue
+ original_path = '%s/original/%s/%s/%s' % (dirs['abs_work_dir'], platform, locale, installer_name)
+ for partner in c['partner_config'].keys():
+ repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
+ total_count += 1
+ if self._repack_apk(partner, original_path, repack_path):
+ success_count += 1
+ else:
+ self.add_failure(platform, locale,
+ message="Unable to repack %(platform)s:%(locale)s installer!")
+ self.summarize_success_count(success_count, total_count,
+ message="Repacked %d of %d installers successfully.")
+
+ def _upload(self, dir_name="unsigned/partner-repacks"):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ local_path = os.path.join(dirs['abs_work_dir'], dir_name)
+ rc = self.query_release_config()
+ replace_dict = {
+ 'buildnum': rc['buildnum'],
+ 'version': rc['version'],
+ }
+ remote_path = '%s/%s' % (c['ftp_upload_base_dir'] % replace_dict, dir_name)
+ if self.rsync_upload_directory(local_path, c['ftp_ssh_key'],
+ c['ftp_user'], c['ftp_server'],
+ remote_path):
+ self.return_code += 1
+
+ def upload_unsigned_bits(self):
+ self._upload()
+
+ # passphrase() in AndroidSigningMixin
+ # verify_passphrases() in AndroidSigningMixin
+
+ def preflight_sign(self):
+ if 'passphrase' not in self.actions:
+ self.passphrase()
+ self.verify_passphrases()
+
+ def sign(self):
+ c = self.config
+ rc = self.query_release_config()
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ success_count = total_count = 0
+ for platform in c['platforms']:
+ for locale in locales:
+ installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
+ if self.query_failure(platform, locale):
+ self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
+ continue
+ for partner in c['partner_config'].keys():
+ unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
+ signed_dir = '%s/partner-repacks/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale)
+ signed_path = "%s/%s" % (signed_dir, installer_name)
+ total_count += 1
+ self.info("Signing %s %s." % (platform, locale))
+ if not os.path.exists(unsigned_path):
+ self.error("Missing apk %s!" % unsigned_path)
+ continue
+ if self.sign_apk(unsigned_path, c['keystore'],
+ self.store_passphrase, self.key_passphrase,
+ c['key_alias']) != 0:
+ self.add_summary("Unable to sign %s:%s apk!" % (platform, locale), level=FATAL)
+ else:
+ self.mkdir_p(signed_dir)
+ if self.align_apk(unsigned_path, signed_path):
+ self.add_failure(platform, locale,
+                                             message="Unable to align %(platform)s:%(locale)s apk!")
+ self.rmtree(signed_dir)
+ else:
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Signed %d of %d apks successfully.")
+
+ # TODO verify signatures.
+
+ def upload_signed_bits(self):
+ self._upload(dir_name="partner-repacks")
+
+
+# main {{{1
+if __name__ == '__main__':
+ mobile_partner_repack = MobilePartnerRepack()
+ mobile_partner_repack.run_and_exit()
diff --git a/testing/mozharness/scripts/multil10n.py b/testing/mozharness/scripts/multil10n.py
new file mode 100755
index 000000000..c89caf7c6
--- /dev/null
+++ b/testing/mozharness/scripts/multil10n.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""multil10n.py
+
+"""
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.l10n.multi_locale_build import MultiLocaleBuild
+
+if __name__ == '__main__':
+ multi_locale_build = MultiLocaleBuild()
+ multi_locale_build.run_and_exit()
diff --git a/testing/mozharness/scripts/openh264_build.py b/testing/mozharness/scripts/openh264_build.py
new file mode 100644
index 000000000..072d102d5
--- /dev/null
+++ b/testing/mozharness/scripts/openh264_build.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import sys
+import os
+import glob
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+# import the guts
+from mozharness.base.vcs.vcsbase import VCSScript
+from mozharness.base.log import ERROR
+from mozharness.base.transfer import TransferMixin
+from mozharness.mozilla.mock import MockMixin
+
+
+class OpenH264Build(MockMixin, TransferMixin, VCSScript):
+ all_actions = [
+ 'clobber',
+ 'checkout-sources',
+ 'build',
+ 'test',
+ 'package',
+ 'upload',
+ ]
+
+ default_actions = [
+ 'checkout-sources',
+ 'build',
+ 'test',
+ 'package',
+ ]
+
+ config_options = [
+ [["--repo"], {
+ "dest": "repo",
+ "help": "OpenH264 repository to use",
+ "default": "https://github.com/cisco/openh264.git"
+ }],
+ [["--rev"], {
+ "dest": "revision",
+ "help": "revision to checkout",
+ "default": "master"
+ }],
+ [["--debug"], {
+ "dest": "debug_build",
+ "action": "store_true",
+ "help": "Do a debug build",
+ }],
+ [["--64"], {
+ "dest": "64bit",
+ "action": "store_true",
+ "help": "Do a 64-bit build",
+ "default": True,
+ }],
+ [["--32"], {
+ "dest": "64bit",
+ "action": "store_false",
+ "help": "Do a 32-bit build",
+ }],
+ [["--os"], {
+ "dest": "operating_system",
+ "help": "Specify the operating system to build for",
+ }],
+ [["--use-mock"], {
+ "dest": "use_mock",
+ "help": "use mock to set up build environment",
+ "action": "store_true",
+ "default": False,
+ }],
+ [["--use-yasm"], {
+ "dest": "use_yasm",
+ "help": "use yasm instead of nasm",
+ "action": "store_true",
+ "default": False,
+ }],
+ ]
+
+ def __init__(self, require_config_file=False, config={},
+ all_actions=all_actions,
+ default_actions=default_actions):
+
+ # Default configuration
+ default_config = {
+ 'debug_build': False,
+ 'mock_target': 'mozilla-centos6-x86_64',
+ 'mock_packages': ['make', 'git', 'nasm', 'glibc-devel.i686', 'libstdc++-devel.i686', 'zip', 'yasm'],
+ 'mock_files': [],
+ 'upload_ssh_key': os.path.expanduser("~/.ssh/ffxbld_rsa"),
+ 'upload_ssh_user': 'ffxbld',
+ 'upload_ssh_host': 'stage.mozilla.org',
+ 'upload_path_base': '/home/ffxbld/openh264',
+ 'use_yasm': False,
+ }
+ default_config.update(config)
+
+ VCSScript.__init__(
+ self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config=default_config,
+ all_actions=all_actions,
+ default_actions=default_actions,
+ )
+
+ if self.config['use_mock']:
+ self.setup_mock()
+ self.enable_mock()
+
+ def query_package_name(self):
+ if self.config['64bit']:
+ bits = '64'
+ else:
+ bits = '32'
+
+ version = self.config['revision']
+
+ if sys.platform == 'linux2':
+ if self.config.get('operating_system') == 'android':
+                return 'openh264-android-{version}.zip'.format(version=version)
+ else:
+ return 'openh264-linux{bits}-{version}.zip'.format(version=version, bits=bits)
+ elif sys.platform == 'darwin':
+ return 'openh264-macosx{bits}-{version}.zip'.format(version=version, bits=bits)
+ elif sys.platform == 'win32':
+ return 'openh264-win{bits}-{version}.zip'.format(version=version, bits=bits)
+ self.fatal("can't determine platform")
+
+ def query_make_params(self):
+ retval = []
+ if self.config['debug_build']:
+ retval.append('BUILDTYPE=Debug')
+
+ if self.config['64bit']:
+ retval.append('ENABLE64BIT=Yes')
+ else:
+ retval.append('ENABLE64BIT=No')
+
+ if "operating_system" in self.config:
+ retval.append("OS=%s" % self.config['operating_system'])
+
+ if self.config['use_yasm']:
+ retval.append('ASM=yasm')
+
+ return retval
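+        # Illustrative note (not in the original script): a 64-bit android
+        # debug build with --use-yasm yields
+        # ['BUILDTYPE=Debug', 'ENABLE64BIT=Yes', 'OS=android', 'ASM=yasm'],
+        # so run_make('plugin') executes:
+        #   make plugin BUILDTYPE=Debug ENABLE64BIT=Yes OS=android ASM=yasm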
+
+ def query_upload_ssh_key(self):
+ return self.config['upload_ssh_key']
+
+ def query_upload_ssh_host(self):
+ return self.config['upload_ssh_host']
+
+ def query_upload_ssh_user(self):
+ return self.config['upload_ssh_user']
+
+ def query_upload_ssh_path(self):
+ return "%s/%s" % (self.config['upload_path_base'], self.config['revision'])
+
+ def run_make(self, target):
+ cmd = ['make', target] + self.query_make_params()
+ dirs = self.query_abs_dirs()
+ repo_dir = os.path.join(dirs['abs_work_dir'], 'src')
+ return self.run_command(cmd, cwd=repo_dir)
+
+ def checkout_sources(self):
+ repo = self.config['repo']
+ rev = self.config['revision']
+
+ dirs = self.query_abs_dirs()
+ repo_dir = os.path.join(dirs['abs_work_dir'], 'src')
+
+ repos = [
+ {'vcs': 'gittool', 'repo': repo, 'dest': repo_dir, 'revision': rev},
+ ]
+
+ # self.vcs_checkout already retries, so no need to wrap it in
+ # self.retry. We set the error_level to ERROR to prevent it going fatal
+ # so we can do our own handling here.
+ retval = self.vcs_checkout_repos(repos, error_level=ERROR)
+ if not retval:
+ self.rmtree(repo_dir)
+ self.fatal("Automation Error: couldn't clone repo", exit_code=4)
+
+ # Checkout gmp-api
+ # TODO: Nothing here updates it yet, or enforces versions!
+ if not os.path.exists(os.path.join(repo_dir, 'gmp-api')):
+ retval = self.run_make('gmp-bootstrap')
+ if retval != 0:
+ self.fatal("couldn't bootstrap gmp")
+ else:
+ self.info("skipping gmp bootstrap - we have it locally")
+
+ # Checkout gtest
+ # TODO: Requires svn!
+ if not os.path.exists(os.path.join(repo_dir, 'gtest')):
+ retval = self.run_make('gtest-bootstrap')
+ if retval != 0:
+ self.fatal("couldn't bootstrap gtest")
+ else:
+ self.info("skipping gtest bootstrap - we have it locally")
+
+ return retval
+
+ def build(self):
+ retval = self.run_make('plugin')
+ if retval != 0:
+ self.fatal("couldn't build plugin")
+
+ def package(self):
+ dirs = self.query_abs_dirs()
+ srcdir = os.path.join(dirs['abs_work_dir'], 'src')
+ package_name = self.query_package_name()
+ package_file = os.path.join(dirs['abs_work_dir'], package_name)
+ if os.path.exists(package_file):
+ os.unlink(package_file)
+ to_package = [os.path.basename(f) for f in glob.glob(os.path.join(srcdir, "*gmpopenh264*"))]
+ cmd = ['zip', package_file] + to_package
+ retval = self.run_command(cmd, cwd=srcdir)
+ if retval != 0:
+ self.fatal("couldn't make package")
+ self.copy_to_upload_dir(package_file)
+
+ def upload(self):
+ if self.config['use_mock']:
+ self.disable_mock()
+ dirs = self.query_abs_dirs()
+ self.rsync_upload_directory(
+ dirs['abs_upload_dir'],
+ self.query_upload_ssh_key(),
+ self.query_upload_ssh_user(),
+ self.query_upload_ssh_host(),
+ self.query_upload_ssh_path(),
+ )
+ if self.config['use_mock']:
+ self.enable_mock()
+
+ def test(self):
+ retval = self.run_make('test')
+ if retval != 0:
+ self.fatal("test failures")
+
+
+# main {{{1
+if __name__ == '__main__':
+ myScript = OpenH264Build()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/antivirus.py b/testing/mozharness/scripts/release/antivirus.py
new file mode 100644
index 000000000..b40dc5cc0
--- /dev/null
+++ b/testing/mozharness/scripts/release/antivirus.py
@@ -0,0 +1,193 @@
+from multiprocessing.pool import ThreadPool
+import os
+import re
+import sys
+import shutil
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+
+
+class AntivirusScan(BaseScript, VirtualenvMixin):
+ config_options = [
+ [["--product"], {
+ "dest": "product",
+ "help": "Product being released, eg: firefox, thunderbird",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name"], {
+ "dest": "bucket_name",
+ "help": "S3 Bucket to retrieve files from",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "action": "append",
+ "help": "List of filename patterns to exclude. See script source for default",
+ }],
+ [["-d", "--download-parallelization"], {
+ "dest": "download_parallelization",
+ "default": 6,
+ "type": "int",
+ "help": "Number of concurrent file downloads",
+ }],
+ [["-s", "--scan-parallelization"], {
+ "dest": "scan_parallelization",
+ "default": 4,
+ "type": "int",
+ "help": "Number of concurrent file scans",
+ }],
+ [["--tools-repo"], {
+ "dest": "tools_repo",
+ "default": "https://hg.mozilla.org/build/tools",
+ }],
+ [["--tools-revision"], {
+ "dest": "tools_revision",
+ "help": "Revision of tools repo to use when downloading extract_and_run_command.py",
+ }],
+ ] + virtualenv_config_options
+
+ DEFAULT_EXCLUDES = [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*\.asc$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*contrib.*"
+ ]
+ CACHE_DIR = 'cache'
+
+ def __init__(self):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "boto",
+ "redo",
+ "mar",
+ ],
+ "virtualenv_path": "venv",
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "get-extract-script",
+ "get-files",
+ "scan-files",
+ "cleanup-cache",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "get-extract-script",
+ "get-files",
+ "scan-files",
+ "cleanup-cache",
+ ],
+ )
+ self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
+ self.dest_dir = self.CACHE_DIR
+
+ def _get_candidates_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config['product'],
+ self.config["version"],
+ self.config["build_number"]
+ )
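+        # Illustrative note (not in the original script): with --product=firefox
+        # --version=39.0b5 --build-number=2 this returns
+        # "pub/firefox/candidates/39.0b5-candidates/build2/".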
+
+ def _matches_exclude(self, keyname):
+ for exclude in self.excludes:
+ if re.search(exclude, keyname):
+ return True
+ return False
+
+ def get_extract_script(self):
+ """Gets a copy of extract_and_run_command.py from tools, and the supporting mar.py,
+ so that we can unpack various files for clam to scan them."""
+ remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(self.config["tools_repo"],
+ self.config["tools_revision"])
+ self.download_file(remote_file, file_name="extract_and_run_command.py")
+
+ def get_files(self):
+ """Pull the candidate files down from S3 for scanning, using parallel requests"""
+ from boto.s3.connection import S3Connection
+ from boto.exception import S3CopyError, S3ResponseError
+ from redo import retry
+ from httplib import HTTPException
+
+ # suppress boto debug logging, it's too verbose with --loglevel=debug
+ import logging
+ logging.getLogger('boto').setLevel(logging.INFO)
+
+ self.info("Connecting to S3")
+ conn = S3Connection(anon=True)
+ self.info("Getting bucket {}".format(self.config["bucket_name"]))
+ bucket = conn.get_bucket(self.config["bucket_name"])
+
+ if os.path.exists(self.dest_dir):
+ self.info('Emptying {}'.format(self.dest_dir))
+ shutil.rmtree(self.dest_dir)
+ os.makedirs(self.dest_dir)
+
+ def worker(item):
+ source, destination = item
+
+ self.info("Downloading {} to {}".format(source, destination))
+ key = bucket.get_key(source)
+ return retry(key.get_contents_to_filename,
+ args=(destination, ),
+ sleeptime=30, max_sleeptime=150,
+ retry_exceptions=(S3CopyError, S3ResponseError,
+ IOError, HTTPException))
+
+ def find_release_files():
+ candidates_prefix = self._get_candidates_prefix()
+ self.info("Getting key names from candidates")
+ for key in bucket.list(prefix=candidates_prefix):
+ keyname = key.name
+ if self._matches_exclude(keyname):
+ self.debug("Excluding {}".format(keyname))
+ else:
+ destination = os.path.join(self.dest_dir, keyname.replace(candidates_prefix, ''))
+ dest_dir = os.path.dirname(destination)
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ yield (keyname, destination)
+
+ pool = ThreadPool(self.config["download_parallelization"])
+ pool.map(worker, find_release_files())
+
+ def scan_files(self):
+ """Scan the files we've collected. We do the download and scan concurrently to make
+ it easier to have a coherent log afterwards. Uses the venv python."""
+ self.run_command([self.query_python_path(), 'extract_and_run_command.py',
+ '-j{}'.format(self.config['scan_parallelization']),
+ 'clamdscan', '-m', '--no-summary', '--', self.dest_dir])
+
+ def cleanup_cache(self):
+ """If we have simultaneous releases in flight an av slave may end up doing another
+ av job before being recycled, and we need to make sure the full disk is available."""
+ shutil.rmtree(self.dest_dir)
+
+
+if __name__ == "__main__":
+ myScript = AntivirusScan()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/beet_mover.py b/testing/mozharness/scripts/release/beet_mover.py
new file mode 100755
index 000000000..adc8b19e1
--- /dev/null
+++ b/testing/mozharness/scripts/release/beet_mover.py
@@ -0,0 +1,372 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""beet_mover.py.
+
+downloads artifacts, scans them and uploads them to s3
+"""
+import hashlib
+import sys
+import os
+import pprint
+import re
+from os import listdir
+from os.path import isfile, join
+import sh
+import redo
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.log import FATAL
+from mozharness.base.python import VirtualenvMixin
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.aws import pop_aws_auth_from_env
+import mozharness
+import mimetypes
+
+
+def get_hash(content, hash_type="md5"):
+ h = hashlib.new(hash_type)
+ h.update(content)
+ return h.hexdigest()
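+# Illustrative note (not in the original script):
+# get_hash("hello") -> "5d41402abc4b2a76b9719d911017c592" (md5);
+# get_hash("hello", "sha256") returns the corresponding sha256 hex digest.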
+
+
+CONFIG_OPTIONS = [
+ [["--template"], {
+ "dest": "template",
+ "help": "Specify jinja2 template file",
+ }],
+ [['--locale', ], {
+ "action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to upload."}],
+ [["--platform"], {
+ "dest": "platform",
+ "help": "Specify the platform of the build",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "full release version based on gecko and tag/stage identifier. e.g. '44.0b1'"
+ }],
+ [["--app-version"], {
+ "dest": "app_version",
+ "help": "numbered version based on gecko. e.g. '44.0'"
+ }],
+ [["--partial-version"], {
+ "dest": "partial_version",
+ "help": "the partial version the mar is based off of"
+ }],
+ [["--artifact-subdir"], {
+ "dest": "artifact_subdir",
+ "default": 'build',
+ "help": "subdir location for taskcluster artifacts after public/ base.",
+ }],
+ [["--build-num"], {
+ "dest": "build_num",
+ "help": "the release build identifier"
+ }],
+ [["--taskid"], {
+ "dest": "taskid",
+ "help": "taskcluster task id to download artifacts from",
+ }],
+ [["--bucket"], {
+ "dest": "bucket",
+ "help": "s3 bucket to move beets to.",
+ }],
+ [["--product"], {
+ "dest": "product",
+ "help": "product for which artifacts are beetmoved",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "action": "append",
+ "help": "List of filename patterns to exclude. See script source for default",
+ }],
+ [["-s", "--scan-parallelization"], {
+ "dest": "scan_parallelization",
+ "default": 4,
+ "type": "int",
+ "help": "Number of concurrent file scans",
+ }],
+]
+
+DEFAULT_EXCLUDES = [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*\.asc$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*contrib.*"
+]
+CACHE_DIR = 'cache'
+
+MIME_MAP = {
+ '': 'text/plain',
+ '.asc': 'text/plain',
+ '.beet': 'text/plain',
+ '.bundle': 'application/octet-stream',
+ '.bz2': 'application/octet-stream',
+ '.checksums': 'text/plain',
+ '.dmg': 'application/x-iso9660-image',
+ '.mar': 'application/octet-stream',
+ '.xpi': 'application/x-xpinstall'
+}
+
+HASH_FORMATS = ["sha512", "sha256"]
+
+
+class BeetMover(BaseScript, VirtualenvMixin, object):
+ def __init__(self, aws_creds):
+ beetmover_kwargs = {
+ 'config_options': CONFIG_OPTIONS,
+ 'all_actions': [
+ # 'clobber',
+ 'create-virtualenv',
+ 'activate-virtualenv',
+ 'generate-candidates-manifest',
+ 'refresh-antivirus',
+ 'verify-bits', # beets
+ 'download-bits', # beets
+ 'scan-bits', # beets
+ 'upload-bits', # beets
+ ],
+ 'require_config_file': False,
+ # Default configuration
+ 'config': {
+ # base index url where to find taskcluster artifact based on taskid
+ "artifact_base_url": 'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
+ "virtualenv_modules": [
+ "boto",
+ "PyYAML",
+ "Jinja2",
+ "redo",
+ "cryptography==2.0.3",
+ "mar",
+ ],
+ "virtualenv_path": "venv",
+ },
+ }
+    # TODO: do excludes need to be configured via the command line for specific builds?
+ super(BeetMover, self).__init__(**beetmover_kwargs)
+
+ c = self.config
+ self.manifest = {}
+ # assigned in _post_create_virtualenv
+ self.virtualenv_imports = None
+ self.bucket = c['bucket']
+ if not all(aws_creds):
+ self.fatal('credentials must be passed in env: "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
+ self.aws_key_id, self.aws_secret_key = aws_creds
+ # if excludes is set from command line, use it otherwise use defaults
+ self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
+ dirs = self.query_abs_dirs()
+ self.dest_dir = os.path.join(dirs['abs_work_dir'], CACHE_DIR)
+ self.mime_fix()
+
+ def activate_virtualenv(self):
+ """
+        activates the virtualenv and adds module imports to an instance-wide
+        namespace.
+
+        Creating and activating a virtualenv onto the currently executing
+        python interpreter is a bit of black magic. Rather than having import
+        statements added in various places within the script, we import them
+        here immediately after we activate the newly created virtualenv.
+ """
+ VirtualenvMixin.activate_virtualenv(self)
+
+ import boto
+ import yaml
+ import jinja2
+ self.virtualenv_imports = {
+ 'boto': boto,
+ 'yaml': yaml,
+ 'jinja2': jinja2,
+ }
+ self.log("activated virtualenv with the modules: {}".format(str(self.virtualenv_imports)))
+
+ def _get_template_vars(self):
+ return {
+ "platform": self.config['platform'],
+ "locales": self.config.get('locales'),
+ "version": self.config['version'],
+ "app_version": self.config.get('app_version', ''),
+ "partial_version": self.config.get('partial_version', ''),
+ "build_num": self.config['build_num'],
+ # keep the trailing slash
+ "s3_prefix": 'pub/{prod}/candidates/{ver}-candidates/{n}/'.format(
+ prod=self.config['product'], ver=self.config['version'],
+ n=self.config['build_num']
+ ),
+ "artifact_base_url": self.config['artifact_base_url'].format(
+ taskid=self.config['taskid'], subdir=self.config['artifact_subdir']
+ )
+ }
+
+ def generate_candidates_manifest(self):
+ """
+ generates and outputs a manifest that maps expected Taskcluster artifact names
+ to release deliverable names
+ """
+ self.log('generating manifest from {}...'.format(self.config['template']))
+ template_dir, template_file = os.path.split(os.path.abspath(self.config['template']))
+ jinja2 = self.virtualenv_imports['jinja2']
+ yaml = self.virtualenv_imports['yaml']
+
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ undefined=jinja2.StrictUndefined)
+ template = jinja_env.get_template(template_file)
+ self.manifest = yaml.safe_load(template.render(**self._get_template_vars()))
+
+ self.log("manifest generated:")
+ self.log(pprint.pformat(self.manifest['mapping']))
+
+ def verify_bits(self):
+ """
+ inspects each artifact and verifies that they were created by trustworthy tasks
+ """
+ # TODO
+ self.log('skipping verification. unimplemented...')
+
+ def refresh_antivirus(self):
+ self.info("Refreshing clamav db...")
+ try:
+ redo.retry(lambda:
+ sh.freshclam("--stdout", "--verbose", _timeout=300,
+ _err_to_out=True))
+ self.info("Done.")
+ except sh.ErrorReturnCode:
+ self.warning("Freshclam failed, skipping DB update")
+
+ def download_bits(self):
+ """
+        downloads the list of artifacts into the work dir based on the
+        generated manifest
+        """
+        self.log('downloading artifacts to the work dir...')
+ dirs = self.query_abs_dirs()
+
+ for locale in self.manifest['mapping']:
+ for deliverable in self.manifest['mapping'][locale]:
+ self.log("downloading '{}' deliverable for '{}' locale".format(deliverable, locale))
+ source = self.manifest['mapping'][locale][deliverable]['artifact']
+ self.retry(
+ self.download_file,
+ args=[source],
+ kwargs={'parent_dir': dirs['abs_work_dir']},
+ error_level=FATAL)
+ self.log('Success!')
+
+ def _strip_prefix(self, s3_key):
+ """Return file name relative to prefix"""
+ # "abc/def/hfg".split("abc/de")[-1] == "f/hfg"
+ return s3_key.split(self._get_template_vars()["s3_prefix"])[-1]
+
+ def upload_bits(self):
+ """
+ uploads list of artifacts to s3 candidates dir based on a given manifest
+ """
+ self.log('uploading artifacts to s3...')
+ dirs = self.query_abs_dirs()
+
+ # connect to s3
+ boto = self.virtualenv_imports['boto']
+ conn = boto.connect_s3(self.aws_key_id, self.aws_secret_key)
+ bucket = conn.get_bucket(self.bucket)
+
+ for locale in self.manifest['mapping']:
+ for deliverable in self.manifest['mapping'][locale]:
+ self.log("uploading '{}' deliverable for '{}' locale".format(deliverable, locale))
+ # we have already downloaded the files locally so we can use that version
+ source = self.manifest['mapping'][locale][deliverable]['artifact']
+ s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
+ downloaded_file = os.path.join(dirs['abs_work_dir'], self.get_filename_from_url(source))
+ # generate checksums for every uploaded file
+ beet_file_name = '{}.beet'.format(downloaded_file)
+ # upload checksums to a separate subdirectory
+ beet_dest = '{prefix}beetmover-checksums/{f}.beet'.format(
+ prefix=self._get_template_vars()["s3_prefix"],
+ f=self._strip_prefix(s3_key)
+ )
+ beet_contents = '\n'.join([
+ '{hash} {fmt} {size} {name}'.format(
+ hash=self.get_hash_for_file(downloaded_file, hash_type=fmt),
+ fmt=fmt,
+ size=os.path.getsize(downloaded_file),
+ name=self._strip_prefix(s3_key)) for fmt in HASH_FORMATS
+ ])
+ self.write_to_file(beet_file_name, beet_contents)
+ self.upload_bit(source=downloaded_file, s3_key=s3_key,
+ bucket=bucket)
+ self.upload_bit(source=beet_file_name, s3_key=beet_dest,
+ bucket=bucket)
+ self.log('Success!')
+
+
+ boto = self.virtualenv_imports['boto']
+ self.info('uploading to s3 with key: {}'.format(s3_key))
+ key = boto.s3.key.Key(bucket) # create new key
+ key.key = s3_key # set key name
+
+ self.info("Checking if `{}` already exists".format(s3_key))
+ key = bucket.get_key(s3_key)
+ if not key:
+ self.info("Uploading to `{}`".format(s3_key))
+ key = bucket.new_key(s3_key)
+ # set key value
+ mime_type, _ = mimetypes.guess_type(source)
+ self.retry(lambda: key.set_contents_from_filename(source, headers={'Content-Type': mime_type}),
+                       error_level=FATAL)
+ else:
+ if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
+ # for now, let's halt. If necessary, we can revisit this and allow for overwrites
+ # to the same buildnum release with different bits
+ self.fatal("`{}` already exists with different checksum.".format(s3_key))
+ self.log("`{}` has the same MD5 checksum, not uploading".format(s3_key))
+
+    def scan_bits(self):
+ dirs = self.query_abs_dirs()
+
+ filenames = [f for f in listdir(dirs['abs_work_dir']) if isfile(join(dirs['abs_work_dir'], f))]
+ self.mkdir_p(self.dest_dir)
+ for file_name in filenames:
+ if self._matches_exclude(file_name):
+ self.info("Excluding {} from virus scan".format(file_name))
+ else:
+                self.info('Copying {} to {}'.format(file_name, self.dest_dir))
+                self.copyfile(os.path.join(dirs['abs_work_dir'], file_name),
+                              os.path.join(self.dest_dir, file_name))
+ self._scan_files()
+ self.info('Emptying {}'.format(self.dest_dir))
+ self.rmtree(self.dest_dir)
+
+ def _scan_files(self):
+ """Scan the files we've collected. We do the download and scan concurrently to make
+ it easier to have a coherent log afterwards. Uses the venv python."""
+ external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
+        self.run_command([self.query_python_path(), os.path.join(external_tools_path, 'extract_and_run_command.py'),
+ '-j{}'.format(self.config['scan_parallelization']),
+ 'clamscan', '--no-summary', '--', self.dest_dir])
+
+ def _matches_exclude(self, keyname):
+ return any(re.search(exclude, keyname) for exclude in self.excludes)
+
+ def mime_fix(self):
+ """ Add mimetypes for custom extensions """
+ mimetypes.init()
+ map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
+
+if __name__ == '__main__':
+ beet_mover = BeetMover(pop_aws_auth_from_env())
+ beet_mover.run_and_exit()
diff --git a/testing/mozharness/scripts/release/generate-checksums.py b/testing/mozharness/scripts/release/generate-checksums.py
new file mode 100644
index 000000000..61a1c43d2
--- /dev/null
+++ b/testing/mozharness/scripts/release/generate-checksums.py
@@ -0,0 +1,284 @@
+from multiprocessing.pool import ThreadPool
+import os
+from os import path
+import re
+import sys
+import posixpath
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.checksums import parse_checksums_file
+from mozharness.mozilla.signing import SigningMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, BuildbotMixin):
+ config_options = [
+ [["--stage-product"], {
+ "dest": "stage_product",
+ "help": "Name of product used in file server's directory structure, eg: firefox, mobile",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name-prefix"], {
+ "dest": "bucket_name_prefix",
+ "help": "Prefix of bucket name, eg: net-mozaws-prod-delivery. This will be used to generate a full bucket name (such as net-mozaws-prod-delivery-{firefox,archive}.",
+ }],
+ [["--bucket-name-full"], {
+ "dest": "bucket_name_full",
+ "help": "Full bucket name, eg: net-mozaws-prod-delivery-firefox",
+ }],
+ [["-j", "--parallelization"], {
+ "dest": "parallelization",
+ "default": 20,
+ "type": int,
+ "help": "Number of checksums file to download concurrently",
+ }],
+ [["-f", "--format"], {
+ "dest": "formats",
+ "default": [],
+ "action": "append",
+ "help": "Format(s) to generate big checksums file for. Default: sha512",
+ }],
+ [["--include"], {
+ "dest": "includes",
+ "default": [],
+ "action": "append",
+ "help": "List of patterns to include in big checksums file. See script source for default.",
+ }],
+ [["--tools-repo"], {
+ "dest": "tools_repo",
+ "default": "https://hg.mozilla.org/build/tools",
+ }],
+ [["--credentials"], {
+ "dest": "credentials",
+ "help": "File containing access key and secret access key for S3",
+ }],
+ ] + virtualenv_config_options
+
+ def __init__(self):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "pip==1.5.5",
+ "boto",
+ ],
+ "virtualenv_path": "venv",
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ all_actions=[
+ "create-virtualenv",
+ "collect-individual-checksums",
+ "create-big-checksums",
+ "sign",
+ "upload",
+ "copy-info-files",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "collect-individual-checksums",
+ "create-big-checksums",
+ "sign",
+ "upload",
+ ],
+ )
+
+ self.checksums = {}
+ self.bucket = None
+ self.bucket_name = self._get_bucket_name()
+ self.file_prefix = self._get_file_prefix()
+ # set the env var for boto to read our special config file
+ # rather than anything else we have at ~/.boto
+ os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
+
+ def _pre_config_lock(self, rw_config):
+ super(ChecksumsGenerator, self)._pre_config_lock(rw_config)
+
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['version', 'build_number']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+        # These defaults are set here rather than in the config because
+        # default lists cannot be completely overridden, only appended to.
+ if not self.config.get("formats"):
+ self.config["formats"] = ["sha512", "sha256"]
+
+ if not self.config.get("includes"):
+ self.config["includes"] = [
+ r"^.*\.tar\.bz2$",
+ r"^.*\.tar\.xz$",
+ r"^.*\.dmg$",
+ r"^.*\.bundle$",
+ r"^.*\.mar$",
+ r"^.*Setup.*\.exe$",
+ r"^.*\.xpi$",
+ r"^.*fennec.*\.apk$",
+ ]
+
+ def _get_bucket_name(self):
+ if self.config.get('bucket_name_full'):
+ return self.config['bucket_name_full']
+
+ suffix = "archive"
+ # Firefox has a special bucket, per https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
+ if self.config["stage_product"] == "firefox":
+ suffix = "firefox"
+
+ return "{}-{}".format(self.config["bucket_name_prefix"], suffix)
+
+ def _get_file_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config["stage_product"], self.config["version"], self.config["build_number"]
+ )
+
+ def _get_sums_filename(self, format_):
+ return "{}SUMS".format(format_.upper())
+
+ def _get_bucket(self):
+ if not self.bucket:
+ self.activate_virtualenv()
+ from boto.s3.connection import S3Connection
+
+ self.info("Connecting to S3")
+ conn = S3Connection()
+ self.debug("Successfully connected to S3")
+ self.info("Connecting to bucket {}".format(self.bucket_name))
+ self.bucket = conn.get_bucket(self.bucket_name)
+ return self.bucket
+
+ def collect_individual_checksums(self):
+ """This step grabs all of the small checksums files for the release,
+ filters out any unwanted files from within them, and adds the remainder
+ to self.checksums for subsequent steps to use."""
+ bucket = self._get_bucket()
+ self.info("File prefix is: {}".format(self.file_prefix))
+
+ # Temporary holding place for checksums
+ raw_checksums = []
+ def worker(item):
+ self.debug("Downloading {}".format(item))
+ # TODO: It would be nice to download the associated .asc file
+ # and verify against it.
+ sums = bucket.get_key(item).get_contents_as_string()
+ raw_checksums.append(sums)
+
+ def find_checksums_files():
+ self.info("Getting key names from bucket")
+ checksum_files = {"beets": [], "checksums": []}
+ for key in bucket.list(prefix=self.file_prefix):
+ if key.key.endswith(".checksums"):
+ self.debug("Found checksums file: {}".format(key.key))
+ checksum_files["checksums"].append(key.key)
+ elif key.key.endswith(".beet"):
+ self.debug("Found beet file: {}".format(key.key))
+ checksum_files["beets"].append(key.key)
+ else:
+ self.debug("Ignoring non-checksums file: {}".format(key.key))
+ if checksum_files["beets"]:
+ self.log("Using beet format")
+ return checksum_files["beets"]
+ else:
+ self.log("Using checksums format")
+ return checksum_files["checksums"]
+
+ pool = ThreadPool(self.config["parallelization"])
+ pool.map(worker, find_checksums_files())
+
+ for c in raw_checksums:
+ for f, info in parse_checksums_file(c).iteritems():
+ for pattern in self.config["includes"]:
+ if re.search(pattern, f):
+ if f in self.checksums:
+ self.fatal("Found duplicate checksum entry for {}, don't know which one to pick.".format(f))
+ if not set(self.config["formats"]) <= set(info["hashes"]):
+ self.fatal("Missing necessary format for file {}".format(f))
+ self.debug("Adding checksums for file: {}".format(f))
+ self.checksums[f] = info
+ break
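+                # Note (comment added for clarity, not in the original): the
+                # else clause below belongs to the inner for loop and runs
+                # only when no include pattern matched (no break occurred).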
+ else:
+ self.debug("Ignoring checksums for file: {}".format(f))
+
+ def create_big_checksums(self):
+ for fmt in self.config["formats"]:
+ sums = self._get_sums_filename(fmt)
+ self.info("Creating big checksums file: {}".format(sums))
+ with open(sums, "w+") as output_file:
+ for fn in sorted(self.checksums):
+ output_file.write("{} {}\n".format(self.checksums[fn]["hashes"][fmt], fn))
+
+ def sign(self):
+ dirs = self.query_abs_dirs()
+
+ tools_dir = path.join(dirs["abs_work_dir"], "tools")
+ self.vcs_checkout(
+ repo=self.config["tools_repo"],
+ branch="default",
+ vcs="hg",
+ dest=tools_dir,
+ )
+
+ sign_cmd = self.query_moz_sign_cmd(formats=["gpg"])
+
+ for fmt in self.config["formats"]:
+ sums = self._get_sums_filename(fmt)
+ self.info("Signing big checksums file: {}".format(sums))
+ retval = self.run_command(sign_cmd + [sums])
+ if retval != 0:
+ self.fatal("Failed to sign {}".format(sums))
+
+ def upload(self):
+ # we need to provide the public side of the gpg key so that people can
+ # verify the detached signatures
+ dirs = self.query_abs_dirs()
+ tools_dir = path.join(dirs["abs_work_dir"], "tools")
+ self.copyfile(os.path.join(tools_dir, 'scripts', 'release', 'KEY'),
+ 'KEY')
+ files = ['KEY']
+
+ for fmt in self.config["formats"]:
+ files.append(self._get_sums_filename(fmt))
+ files.append("{}.asc".format(self._get_sums_filename(fmt)))
+
+ bucket = self._get_bucket()
+ for f in files:
+ dest = posixpath.join(self.file_prefix, f)
+ self.info("Uploading {} to {}".format(f, dest))
+ key = bucket.new_key(dest)
+ key.set_contents_from_filename(f, headers={'Content-Type': 'text/plain'})
+
+ def copy_info_files(self):
+ bucket = self._get_bucket()
+
+ for key in bucket.list(prefix=self.file_prefix):
+ if re.search(r'/en-US/android.*_info\.txt$', key.name):
+ self.info("Found {}".format(key.name))
+ dest = posixpath.join(self.file_prefix, posixpath.basename(key.name))
+ self.info("Copying to {}".format(dest))
+ bucket.copy_key(new_key_name=dest,
+ src_bucket_name=self.bucket_name,
+ src_key_name=key.name,
+ metadata={'Content-Type': 'text/plain'})
+
+
+if __name__ == "__main__":
+ myScript = ChecksumsGenerator()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py b/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py
new file mode 100644
index 000000000..78a60b4bc
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_bouncer_aliases.py
+
+A script to replace the old-fashioned way of updating the bouncer aliases
+through the tools script.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+# PostReleaseBouncerAliases {{{1
+class PostReleaseBouncerAliases(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(PostReleaseBouncerAliases, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "redo",
+ "requests",
+ ],
+ "virtualenv_path": "venv",
+ 'credentials_file': 'oauth.txt',
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "update-bouncer-aliases",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "update-bouncer-aliases",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PostReleaseBouncerAliases, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['tuxedo_server_url', 'version']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ return
+
+ def _update_bouncer_alias(self, tuxedo_server_url, auth,
+ related_product, alias):
+ from redo import retry
+ import requests
+
+ url = "%s/create_update_alias" % tuxedo_server_url
+ data = {"alias": alias, "related_product": related_product}
+ self.log("Updating {} to point to {} using {}".format(alias,
+ related_product,
+ url))
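+ # Illustrative values (hypothetical): alias "firefox-latest" pointed at
+ # related_product "Firefox-39.0" via a POST to
+ # <tuxedo_server_url>/create_update_alias.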
+
+ # Wrap the real call to hide credentials from retry's logging
+ def do_update_bouncer_alias():
+ r = requests.post(url, data=data, auth=auth,
+ verify=False, timeout=60)
+ r.raise_for_status()
+
+ retry(do_update_bouncer_alias)
+
+ def update_bouncer_aliases(self):
+ tuxedo_server_url = self.config['tuxedo_server_url']
+ credentials_file = os.path.join(os.getcwd(),
+ self.config['credentials_file'])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
+ version = self.config['version']
+ for product, info in self.config["products"].iteritems():
+ if "alias" in info:
+ product_template = info["product-name"]
+ related_product = product_template % {"version": version}
+ self._update_bouncer_alias(tuxedo_server_url, auth,
+ related_product, info["alias"])
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PostReleaseBouncerAliases().run_and_exit()
diff --git a/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py b/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py
new file mode 100644
index 000000000..f84b5771c
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_mark_as_shipped.py
+
+A script to automate the manual way of updating a release as shipped in Ship-it
+following its successful ship-to-the-door operation.
+"""
+import os
+import sys
+from datetime import datetime
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+def build_release_name(product, version, buildno):
+ """Function to reconstruct the name of the release based on product,
+ version and buildnumber
+ """
+ return "{}-{}-build{}".format(product.capitalize(),
+ str(version), str(buildno))
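+# Illustrative: build_release_name("firefox", "39.0", 2) -> "Firefox-39.0-build2"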
+
+
+class MarkReleaseAsShipped(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(MarkReleaseAsShipped, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "shipitapi",
+ ],
+ "virtualenv_path": "venv",
+ "credentials_file": "oauth.txt",
+ "buildbot_json_path": "buildprops.json",
+ "timeout": 60,
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "mark-as-shipped",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "mark-as-shipped",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(MarkReleaseAsShipped, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config['properties']
+ mandatory_props = ['product', 'version', 'build_number']
+ missing_props = []
+ for prop in mandatory_props:
+ if prop in props:
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ missing_props.append(prop)
+
+ if missing_props:
+ raise Exception("%s not found in configs" % missing_props)
+
+ self.config['name'] = build_release_name(self.config['product'],
+ self.config['version'],
+ self.config['build_number'])
+
+ def mark_as_shipped(self):
+ """Method to make a simple call to Ship-it API to change a release
+ status to 'shipped'
+ """
+ credentials_file = os.path.join(os.getcwd(),
+ self.config["credentials_file"])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ ship_it_credentials = credentials["ship_it_credentials"]
+ auth = (self.config["ship_it_username"],
+ ship_it_credentials.get(self.config["ship_it_username"]))
+ api_root = self.config['ship_it_root']
+
+ from shipitapi import Release
+ release_api = Release(auth, api_root=api_root,
+ timeout=self.config['timeout'])
+ shipped_at = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
+
+ self.info("Mark the release as shipped with %s timestamp" % shipped_at)
+ release_api.update(self.config['name'],
+ status='shipped', shippedAt=shipped_at)
+
+
+if __name__ == '__main__':
+ MarkReleaseAsShipped().run_and_exit()
diff --git a/testing/mozharness/scripts/release/postrelease_version_bump.py b/testing/mozharness/scripts/release/postrelease_version_bump.py
new file mode 100644
index 000000000..dfffa699a
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_version_bump.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_version_bump.py
+
+A script to increase in-tree version number after shipping a release.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+
+
+# PostReleaseVersionBump {{{1
+class PostReleaseVersionBump(MercurialScript, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--next-version', ], {
+ "action": "store",
+ "dest": "next_version",
+ "type": "string",
+ "help": "Next version used in version bump",
+ }],
+ [['--ssh-user', ], {
+ "action": "store",
+ "dest": "ssh_user",
+ "type": "string",
+ "help": "SSH username with hg.mozilla.org permissions",
+ }],
+ [['--ssh-key', ], {
+ "action": "store",
+ "dest": "ssh_key",
+ "type": "string",
+ "help": "Path to SSH key.",
+ }],
+ [['--product', ], {
+ "action": "store",
+ "dest": "product",
+ "type": "string",
+ "help": "Product name",
+ }],
+ [['--version', ], {
+ "action": "store",
+ "dest": "version",
+ "type": "string",
+ "help": "Version",
+ }],
+ [['--build-number', ], {
+ "action": "store",
+ "dest": "build_number",
+ "type": "string",
+ "help": "Build number",
+ }],
+ [['--revision', ], {
+ "action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "HG revision to tag",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ super(PostReleaseVersionBump, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'clean-repos',
+ 'pull',
+ 'bump_postrelease',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ ],
+ default_actions=[
+ 'clean-repos',
+ 'pull',
+ 'bump_postrelease',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PostReleaseVersionBump, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ else:
+ props = self.buildbot_config["properties"]
+ for prop in ['next_version', 'product', 'version', 'build_number',
+ 'revision']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ if not self.config.get("next_version"):
+ self.fatal("Next version has to be set. Use --next-version or "
+ "pass `next_version' via buildbot properties.")
+
+ def query_abs_dirs(self):
+ """ Allow for abs_from_dir and abs_to_dir
+ """
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(PostReleaseVersionBump, self).query_abs_dirs()
+ self.abs_dirs["abs_gecko_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def query_commit_dirs(self):
+ return [self.query_abs_dirs()["abs_gecko_dir"]]
+
+ def query_commit_message(self):
+ return "Automatic version bump. CLOSED TREE NO BUG a=release"
+
+ def query_push_dirs(self):
+ return self.query_commit_dirs()
+
+ def query_push_args(self, cwd):
+ # cwd is not used here
+ hg_ssh_opts = "ssh -l {user} -i {key}".format(
+ user=self.config["ssh_user"],
+ key=os.path.expanduser(self.config["ssh_key"])
+ )
+ return ["-e", hg_ssh_opts, "-r", "."]
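+ # Illustrative result (hypothetical user and key):
+ # ["-e", "ssh -l ffxbld -i /home/ffxbld/.ssh/id_rsa", "-r", "."]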
+
+ def pull(self):
+ super(PostReleaseVersionBump, self).pull(
+ repos=self.query_repos())
+
+ def bump_postrelease(self, *args, **kwargs):
+ """Bump version"""
+ dirs = self.query_abs_dirs()
+ for f in self.config["version_files"]:
+ curr_version = ".".join(
+ self.get_version(dirs['abs_gecko_dir'], f["file"]))
+ self.replace(os.path.join(dirs['abs_gecko_dir'], f["file"]),
+ curr_version, self.config["next_version"])
+
+ def tag(self):
+ dirs = self.query_abs_dirs()
+ tags = ["{product}_{version}_BUILD{build_number}",
+ "{product}_{version}_RELEASE"]
+ tags = [t.format(product=self.config["product"].upper(),
+ version=self.config["version"].replace(".", "_"),
+ build_number=self.config["build_number"])
+ for t in tags]
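+ # Illustrative: product "firefox", version "45.0", build_number "2" yield
+ # the tags FIREFOX_45_0_BUILD2 and FIREFOX_45_0_RELEASE.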
+ message = "No bug - Tagging {revision} with {tags} a=release CLOSED TREE"
+ message = message.format(
+ revision=self.config["revision"],
+ tags=', '.join(tags))
+ self.hg_tag(cwd=dirs["abs_gecko_dir"], tags=tags,
+ revision=self.config["revision"], message=message,
+ user=self.config["hg_user"], force=True)
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PostReleaseVersionBump().run_and_exit()
diff --git a/testing/mozharness/scripts/release/publish_balrog.py b/testing/mozharness/scripts/release/publish_balrog.py
new file mode 100644
index 000000000..edb381311
--- /dev/null
+++ b/testing/mozharness/scripts/release/publish_balrog.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" updates.py
+
+A script publish a release to Balrog.
+
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+# PublishBalrog {{{1
+
+
+class PublishBalrog(MercurialScript, BuildbotMixin):
+
+ def __init__(self, require_config_file=True):
+ super(PublishBalrog, self).__init__(
+ all_actions=[
+ 'clobber',
+ 'pull',
+ 'submit-to-balrog',
+ ],
+ default_actions=[
+ 'clobber',
+ 'pull',
+ 'submit-to-balrog',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ 'credentials_file': 'oauth.txt',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PublishBalrog, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version and appVersion should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['product', 'version', 'build_number', 'channels',
+ 'balrog_api_root', 'schedule_at', 'background_rate']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(PublishBalrog, self).query_abs_dirs()
+ self.abs_dirs["abs_tools_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_channel_configs(self):
+ """Return a list of channel configs.
+ For RC builds it returns "release" and "beta" using
+ "enabled_if_version_matches" to match RC.
+
+ :return: list
+ """
+ return [(n, c) for n, c in self.config["update_channels"].items() if
+ n in self.config["channels"]]
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def pull(self):
+ super(PublishBalrog, self).pull(
+ repos=self.query_repos())
+
+
+ def submit_to_balrog(self):
+ for _, channel_config in self.query_channel_configs():
+ self._submit_to_balrog(channel_config)
+
+ def _submit_to_balrog(self, channel_config):
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['credentials_file'])
+ cmd = [
+ self.query_exe("python"),
+ os.path.join(dirs["abs_tools_dir"],
+ "scripts/build-promotion/balrog-release-shipper.py")]
+ cmd.extend([
+ "--api-root", self.config["balrog_api_root"],
+ "--credentials-file", auth,
+ "--username", self.config["balrog_username"],
+ "--version", self.config["version"],
+ "--product", self.config["product"],
+ "--build-number", str(self.config["build_number"]),
+ "--verbose",
+ ])
+ for r in channel_config["publish_rules"]:
+ cmd.extend(["--rules", r])
+ if self.config.get("schedule_at"):
+ cmd.extend(["--schedule-at", self.config["schedule_at"]])
+ if self.config.get("background_rate"):
+ cmd.extend(["--background-rate", str(self.config["background_rate"])])
+
+ self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PublishBalrog().run_and_exit()
diff --git a/testing/mozharness/scripts/release/push-candidate-to-releases.py b/testing/mozharness/scripts/release/push-candidate-to-releases.py
new file mode 100644
index 000000000..5339fa38a
--- /dev/null
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -0,0 +1,200 @@
+from multiprocessing.pool import ThreadPool
+import os
+import re
+import sys
+
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.aws import pop_aws_auth_from_env
+
+
+class ReleasePusher(BaseScript, VirtualenvMixin):
+ config_options = [
+ [["--product"], {
+ "dest": "product",
+ "help": "Product being released, eg: firefox, thunderbird",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name"], {
+ "dest": "bucket_name",
+ "help": "Bucket to copy files from candidates/ to releases/",
+ }],
+ [["--credentials"], {
+ "dest": "credentials",
+ "help": "File containing access key and secret access key",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "default": [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*[^k]\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*bouncer.apk$",
+ r"^.*contrib.*",
+ r"^.*/beetmover-checksums/.*$",
+ ],
+ "action": "append",
+ "help": "List of patterns to exclude from copy. The list can be "
+ "extended by passing multiple --exclude arguments.",
+ }],
+ [["-j", "--parallelization"], {
+ "dest": "parallelization",
+ "default": 20,
+ "type": "int",
+ "help": "Number of copy requests to run concurrently",
+ }],
+ ] + virtualenv_config_options
+
+ def __init__(self, aws_creds):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "boto",
+ "redo",
+ ],
+ "virtualenv_path": "venv",
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "push-to-releases",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "push-to-releases",
+ ],
+ )
+
+ # validate aws credentials
+ if not (all(aws_creds) or self.config.get('credentials')):
+ self.fatal("aws creds not defined. please add them to your config or env.")
+ if any(aws_creds) and self.config.get('credentials'):
+ self.fatal("aws creds found in env and self.config. please declare in one place only.")
+
+ # set aws credentials
+ if all(aws_creds):
+ self.aws_key_id, self.aws_secret_key = aws_creds
+ else: # use the credentials file via BOTO_CONFIG, set below
+ self.aws_key_id, self.aws_secret_key = None, None
+ # set the env var for boto to read our special config file
+ # rather than anything else we have at ~/.boto
+ os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
+
+ def _get_candidates_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config['product'],
+ self.config["version"],
+ self.config["build_number"]
+ )
+
+ def _get_releases_prefix(self):
+ return "pub/{}/releases/{}/".format(
+ self.config["product"],
+ self.config["version"]
+ )
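+ # Illustrative (values from the option help strings): product "firefox",
+ # version "39.0b5", build_number "2" copy
+ # pub/firefox/candidates/39.0b5-candidates/build2/ to pub/firefox/releases/39.0b5/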
+
+ def _matches_exclude(self, keyname):
+ for exclude in self.config["excludes"]:
+ if re.search(exclude, keyname):
+ return True
+ return False
+
+ def push_to_releases(self):
+ """This step grabs the list of files in the candidates dir,
+ filters out any unwanted files from within them, and copies
+ the remainder."""
+ from boto.s3.connection import S3Connection
+ from boto.exception import S3CopyError, S3ResponseError
+ from redo import retry
+
+ # suppress boto debug logging, it's too verbose with --loglevel=debug
+ import logging
+ logging.getLogger('boto').setLevel(logging.INFO)
+
+ self.info("Connecting to S3")
+ conn = S3Connection(aws_access_key_id=self.aws_key_id,
+ aws_secret_access_key=self.aws_secret_key)
+ self.info("Getting bucket {}".format(self.config["bucket_name"]))
+ bucket = conn.get_bucket(self.config["bucket_name"])
+
+ # ensure the destination is empty
+ self.info("Checking destination {} is empty".format(self._get_releases_prefix()))
+ keys = [k for k in bucket.list(prefix=self._get_releases_prefix())]
+ if keys:
+ self.warning("Destination already exists with %s keys" % len(keys))
+
+ def worker(item):
+ source, destination = item
+
+ def copy_key():
+ source_key = bucket.get_key(source)
+ dest_key = bucket.get_key(destination)
+ # According to http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
+ # S3 key MD5 is represented as ETag, except when objects are
+ # uploaded using multipart method. In this case objects's ETag
+ # is constructed using its MD5, minus symbol, and number of
+ # part. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823
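+ # Illustrative: a multipart ETag such as "d41d8cd98f00b204-38" is split
+ # on "-" so that only the MD5 portion before the dash is compared.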
+ source_md5 = source_key.etag.split("-")[0]
+ if dest_key:
+ dest_md5 = dest_key.etag.split("-")[0]
+ else:
+ dest_md5 = None
+
+ if not dest_key:
+ self.info("Copying {} to {}".format(source, destination))
+ bucket.copy_key(destination, self.config["bucket_name"],
+ source)
+ elif source_md5 == dest_md5:
+ self.warning(
+ "{} already exists with the same content ({}), skipping copy".format(
+ destination, dest_md5))
+ else:
+ self.fatal(
+ "{} already exists with different content (src ETag: {}, dest ETag: {}), aborting".format(
+ destination, source_key.etag, dest_key.etag))
+
+ return retry(copy_key, sleeptime=5, max_sleeptime=60,
+ retry_exceptions=(S3CopyError, S3ResponseError))
+
+ def find_release_files():
+ candidates_prefix = self._get_candidates_prefix()
+ release_prefix = self._get_releases_prefix()
+ self.info("Getting key names from candidates")
+ for key in bucket.list(prefix=candidates_prefix):
+ keyname = key.name
+ if self._matches_exclude(keyname):
+ self.debug("Excluding {}".format(keyname))
+ else:
+ destination = keyname.replace(candidates_prefix,
+ release_prefix)
+ yield (keyname, destination)
+
+ pool = ThreadPool(self.config["parallelization"])
+ pool.map(worker, find_release_files())
+
+if __name__ == "__main__":
+ myScript = ReleasePusher(pop_aws_auth_from_env())
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/updates.py b/testing/mozharness/scripts/release/updates.py
new file mode 100644
index 000000000..4b660a67b
--- /dev/null
+++ b/testing/mozharness/scripts/release/updates.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" updates.py
+
+A script to bump patcher configs, generate update verification configs, and
+publish top-level release blob information to Balrog.
+
+It clones the tools repo, modifies the existing patcher config to include
+current release build information, generates update verification configs,
+commits the changes, and tags the repo following the Releng tagging conventions.
+After the changes are pushed to the repo, the script submits top-level release
+information to Balrog.
+"""
+
+import os
+import re
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+from mozharness.mozilla.release import get_previous_version
+
+
+# UpdatesBumper {{{1
+class UpdatesBumper(MercurialScript, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--ssh-user', ], {
+ "action": "store",
+ "dest": "ssh_user",
+ "type": "string",
+ "help": "SSH username with hg.mozilla.org permissions",
+ }],
+ [['--ssh-key', ], {
+ "action": "store",
+ "dest": "ssh_key",
+ "type": "string",
+ "help": "Path to SSH key.",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ super(UpdatesBumper, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'pull',
+ 'download-shipped-locales',
+ 'bump-configs',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ 'submit-to-balrog',
+ ],
+ default_actions=[
+ 'clobber',
+ 'pull',
+ 'download-shipped-locales',
+ 'bump-configs',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ 'submit-to-balrog',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ 'credentials_file': 'oauth.txt',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(UpdatesBumper, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version and appVersion should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['product', 'version', 'build_number', 'revision',
+ 'appVersion', 'balrog_api_root', "channels"]:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ partials = [v.strip() for v in props["partial_versions"].split(",")]
+ self.config["partial_versions"] = [v.split("build") for v in partials]
+ self.config["platforms"] = [p.strip() for p in
+ props["platforms"].split(",")]
+ self.config["channels"] = [c.strip() for c in
+ props["channels"].split(",")]
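+ # Illustrative: partial_versions "38.0.5build2, 39.0build1" becomes
+ # [["38.0.5", "2"], ["39.0", "1"]]; platforms and channels are plain
+ # comma-separated lists.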
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(UpdatesBumper, self).query_abs_dirs()
+ self.abs_dirs["abs_tools_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def query_commit_dirs(self):
+ return [self.query_abs_dirs()["abs_tools_dir"]]
+
+ def query_commit_message(self):
+ return "Automated configuration bump"
+
+ def query_push_dirs(self):
+ return self.query_commit_dirs()
+
+ def query_push_args(self, cwd):
+ # cwd is not used here
+ hg_ssh_opts = "ssh -l {user} -i {key}".format(
+ user=self.config["ssh_user"],
+ key=os.path.expanduser(self.config["ssh_key"])
+ )
+ return ["-e", hg_ssh_opts]
+
+ def query_shipped_locales_path(self):
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs["abs_work_dir"], "shipped-locales")
+
+ def query_channel_configs(self):
+ """Return a list of channel configs.
+ For RC builds it returns "release" and "beta" using
+ "enabled_if_version_matches" to match RC.
+
+ :return: list
+ """
+ return [(n, c) for n, c in self.config["update_channels"].items() if
+ n in self.config["channels"]]
+
+ def pull(self):
+ super(UpdatesBumper, self).pull(
+ repos=self.query_repos())
+
+ def download_shipped_locales(self):
+ dirs = self.query_abs_dirs()
+ self.mkdir_p(dirs["abs_work_dir"])
+ url = self.config["shipped-locales-url"].format(
+ revision=self.config["revision"])
+ if not self.download_file(url=url,
+ file_name=self.query_shipped_locales_path()):
+ self.fatal("Unable to fetch shipped-locales from %s" % url)
+
+ def bump_configs(self):
+ for channel, channel_config in self.query_channel_configs():
+ self.bump_patcher_config(channel_config)
+ self.bump_update_verify_configs(channel, channel_config)
+
+ def query_matching_partials(self, channel_config):
+ return [(v, b) for v, b in self.config["partial_versions"] if
+ re.match(channel_config["version_regex"], v)]
+
+ def query_patcher_config(self, channel_config):
+ dirs = self.query_abs_dirs()
+ patcher_config = os.path.join(
+ dirs["abs_tools_dir"], "release/patcher-configs",
+ channel_config["patcher_config"])
+ return patcher_config
+
+ def query_update_verify_config(self, channel, platform):
+ dirs = self.query_abs_dirs()
+ uvc = os.path.join(
+ dirs["abs_tools_dir"], "release/updates",
+ "{}-{}-{}.cfg".format(channel, self.config["product"], platform))
+ return uvc
+
+ def bump_patcher_config(self, channel_config):
+ # TODO: to make it possible to run this before we have files copied to
+ # the candidates directory, we need to add support to fetch build IDs
+ # from tasks.
+ dirs = self.query_abs_dirs()
+ env = {"PERL5LIB": os.path.join(dirs["abs_tools_dir"], "lib/perl")}
+ partial_versions = [v[0] for v in
+ self.query_matching_partials(channel_config)]
+ script = os.path.join(
+ dirs["abs_tools_dir"], "release/patcher-config-bump.pl")
+ patcher_config = self.query_patcher_config(channel_config)
+ cmd = [self.query_exe("perl"), script]
+ cmd.extend([
+ "-p", self.config["product"],
+ "-r", self.config["product"].capitalize(),
+ "-v", self.config["version"],
+ "-a", self.config["appVersion"],
+ "-o", get_previous_version(
+ self.config["version"], partial_versions),
+ "-b", str(self.config["build_number"]),
+ "-c", patcher_config,
+ "-f", self.config["archive_domain"],
+ "-d", self.config["download_domain"],
+ "-l", self.query_shipped_locales_path(),
+ ])
+ for v in partial_versions:
+ cmd.extend(["--partial-version", v])
+ for p in self.config["platforms"]:
+ cmd.extend(["--platform", p])
+ for mar_channel_id in channel_config["mar_channel_ids"]:
+ cmd.extend(["--mar-channel-id", mar_channel_id])
+ self.run_command(cmd, halt_on_failure=True, env=env)
+
+ def bump_update_verify_configs(self, channel, channel_config):
+ dirs = self.query_abs_dirs()
+ script = os.path.join(
+ dirs["abs_tools_dir"],
+ "scripts/build-promotion/create-update-verify-config.py")
+ patcher_config = self.query_patcher_config(channel_config)
+ for platform in self.config["platforms"]:
+ cmd = [self.query_exe("python"), script]
+ output = self.query_update_verify_config(channel, platform)
+ cmd.extend([
+ "--config", patcher_config,
+ "--platform", platform,
+ "--update-verify-channel",
+ channel_config["update_verify_channel"],
+ "--output", output,
+ "--archive-prefix", self.config["archive_prefix"],
+ "--previous-archive-prefix",
+ self.config["previous_archive_prefix"],
+ "--product", self.config["product"],
+ "--balrog-url", self.config["balrog_url"],
+ "--build-number", str(self.config["build_number"]),
+ ])
+
+ self.run_command(cmd, halt_on_failure=True)
+
+ def tag(self):
+ dirs = self.query_abs_dirs()
+ tags = ["{product}_{version}_BUILD{build_number}_RUNTIME",
+ "{product}_{version}_RELEASE_RUNTIME"]
+ tags = [t.format(product=self.config["product"].upper(),
+ version=self.config["version"].replace(".", "_"),
+ build_number=self.config["build_number"])
+ for t in tags]
+ self.hg_tag(cwd=dirs["abs_tools_dir"], tags=tags,
+ user=self.config["hg_user"], force=True)
+
+ def submit_to_balrog(self):
+ for _, channel_config in self.query_channel_configs():
+ self._submit_to_balrog(channel_config)
+
+ def _submit_to_balrog(self, channel_config):
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['credentials_file'])
+ cmd = [
+ self.query_exe("python"),
+ os.path.join(dirs["abs_tools_dir"],
+ "scripts/build-promotion/balrog-release-pusher.py")]
+ cmd.extend([
+ "--api-root", self.config["balrog_api_root"],
+ "--download-domain", self.config["download_domain"],
+ "--archive-domain", self.config["archive_domain"],
+ "--credentials-file", auth,
+ "--product", self.config["product"],
+ "--version", self.config["version"],
+ "--build-number", str(self.config["build_number"]),
+ "--app-version", self.config["appVersion"],
+ "--username", self.config["balrog_username"],
+ "--verbose",
+ ])
+ for c in channel_config["channel_names"]:
+ cmd.extend(["--channel", c])
+ for r in channel_config["rules_to_update"]:
+ cmd.extend(["--rule-to-update", r])
+ for p in self.config["platforms"]:
+ cmd.extend(["--platform", p])
+ for v, build_number in self.query_matching_partials(channel_config):
+ partial = "{version}build{build_number}".format(
+ version=v, build_number=build_number)
+ cmd.extend(["--partial-update", partial])
+ if channel_config["requires_mirrors"]:
+ cmd.append("--requires-mirrors")
+ if self.config["balrog_use_dummy_suffix"]:
+ cmd.append("--dummy")
+
+ self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
+
+# __main__ {{{1
+if __name__ == '__main__':
+ UpdatesBumper().run_and_exit()
diff --git a/testing/mozharness/scripts/release/uptake_monitoring.py b/testing/mozharness/scripts/release/uptake_monitoring.py
new file mode 100644
index 000000000..9ec24621f
--- /dev/null
+++ b/testing/mozharness/scripts/release/uptake_monitoring.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" uptake_monitoring.py
+
+A script to replace the old-fashioned way of computing uptake monitoring
+from the scheduler on the slaves.
+"""
+
+import os
+import sys
+import datetime
+import time
+import xml.dom.minidom
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+def get_tuxedo_uptake_url(tuxedo_server_url, related_product, os):
+ return '%s/uptake/?product=%s&os=%s' % (tuxedo_server_url,
+ related_product, os)
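+# Illustrative (hypothetical server URL):
+# get_tuxedo_uptake_url("https://tuxedo.example.com/api", "Firefox-39.0", "win")
+# -> "https://tuxedo.example.com/api/uptake/?product=Firefox-39.0&os=win"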
+
+
+class UptakeMonitoring(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(UptakeMonitoring, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "redo",
+ "requests",
+ ],
+
+ "virtualenv_path": "venv",
+ "credentials_file": "oauth.txt",
+ "buildbot_json_path": "buildprops.json",
+ "poll_interval": 60,
+ "poll_timeout": 20*60,
+ "min_uptake": 10000,
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "monitor-uptake",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "monitor-uptake",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(UptakeMonitoring, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['tuxedo_server_url', 'version']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ return
+ partials = [v.strip() for v in props["partial_versions"].split(",")]
+ self.config["partial_versions"] = [v.split("build")[0] for v in partials]
+ self.config["platforms"] = [p.strip() for p in
+ props["platforms"].split(",")]
+
+ def _get_product_uptake(self, tuxedo_server_url, auth,
+ related_product, os):
+ from redo import retry
+ import requests
+
+ url = get_tuxedo_uptake_url(tuxedo_server_url, related_product, os)
+ self.info("Requesting {} from tuxedo".format(url))
+
+ def get_tuxedo_page():
+ r = requests.get(url, auth=auth,
+ verify=False, timeout=60)
+ r.raise_for_status()
+ return r.content
+
+ def calculateUptake(page):
+ doc = xml.dom.minidom.parseString(page)
+ uptake_values = []
+
+ for element in doc.getElementsByTagName('available'):
+ for node in element.childNodes:
+ if node.nodeType == xml.dom.minidom.Node.TEXT_NODE and \
+ node.data.isdigit():
+ uptake_values.append(int(node.data))
+ if not uptake_values:
+ uptake_values = [0]
+ return min(uptake_values)
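+ # Illustrative payload (hypothetical): a response containing
+ # <available>12000</available><available>9500</available>
+ # makes calculateUptake return 9500, the minimum across entries.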
+
+ page = retry(get_tuxedo_page)
+ uptake = calculateUptake(page)
+ self.info("Current uptake for {} is {}".format(related_product, uptake))
+ return uptake
+
+ def _get_release_uptake(self, auth):
+ assert isinstance(self.config["platforms"], (list, tuple))
+
+ # handle the products first
+ tuxedo_server_url = self.config["tuxedo_server_url"]
+ version = self.config["version"]
+ dl = []
+
+ for product, info in self.config["products"].iteritems():
+ if info.get("check_uptake"):
+ product_template = info["product-name"]
+ related_product = product_template % {"version": version}
+
+ enUS_platforms = set(self.config["platforms"])
+ paths_platforms = set(info["paths"].keys())
+ platforms = enUS_platforms.intersection(paths_platforms)
+
+ for platform in platforms:
+ bouncer_platform = info["paths"].get(platform).get('bouncer-platform')
+ dl.append(self._get_product_uptake(tuxedo_server_url, auth,
+ related_product, bouncer_platform))
+ # handle the partials as well
+ prev_versions = self.config["partial_versions"]
+ for product, info in self.config["partials"].iteritems():
+ if info.get("check_uptake"):
+ product_template = info["product-name"]
+ for prev_version in prev_versions:
+ subs = {
+ "version": version,
+ "prev_version": prev_version
+ }
+ related_product = product_template % subs
+
+ enUS_platforms = set(self.config["platforms"])
+ paths_platforms = set(info["paths"].keys())
+ platforms = enUS_platforms.intersection(paths_platforms)
+
+ for platform in platforms:
+ bouncer_platform = info["paths"].get(platform).get('bouncer-platform')
+ dl.append(self._get_product_uptake(tuxedo_server_url, auth,
+ related_product, bouncer_platform))
+ return min(dl)
+
+ def monitor_uptake(self):
+ credentials_file = os.path.join(os.getcwd(),
+ self.config["credentials_file"])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
+ self.info("Starting the uptake monitoring polling loop ...")
+
+ start_time = datetime.datetime.now()
+ while True:
+ delta = (datetime.datetime.now() - start_time).seconds
+ if delta > self.config["poll_timeout"]:
+ self.error("Uptake monitoring timed out")
+ raise Exception("Time-out during uptake monitoring")
+
+ uptake = self._get_release_uptake(auth)
+ self.info("Current uptake value to check is {}".format(uptake))
+
+ if uptake >= self.config["min_uptake"]:
+ self.info("Uptake monitoring is complete!")
+ break
+ else:
+ self.info("Mirrors not yet updated, sleeping for a bit ...")
+ time.sleep(self.config["poll_interval"])
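+ # With the defaults above (poll_interval=60, poll_timeout=20*60,
+ # min_uptake=10000) this polls once a minute for up to 20 minutes until
+ # the minimum uptake reaches 10000.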
+
+
+if __name__ == '__main__':
+ myScript = UptakeMonitoring()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/spidermonkey/build.b2g b/testing/mozharness/scripts/spidermonkey/build.b2g
new file mode 100755
index 000000000..958946230
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey/build.b2g
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+
+cd $SOURCE
+TOP=$(cd .. && echo $PWD)
+export MOZBUILD_STATE_PATH=$TOP/mozbuild-state
+[ -d $MOZBUILD_STATE_PATH ] || mkdir $MOZBUILD_STATE_PATH
+
+exec ./mach build -v -j8
diff --git a/testing/mozharness/scripts/spidermonkey/build.browser b/testing/mozharness/scripts/spidermonkey/build.browser
new file mode 100755
index 000000000..645d2ae86
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey/build.browser
@@ -0,0 +1,10 @@
+#!/bin/bash -e
+
+cd $SOURCE
+TOP=$(cd ..; pwd)
+export MOZBUILD_STATE_PATH=$TOP/mozbuild-state
+[ -d $MOZBUILD_STATE_PATH ] || mkdir $MOZBUILD_STATE_PATH
+
+export MOZCONFIG=$SOURCE/browser/config/mozconfigs/linux64/hazards
+
+exec ./mach build -v -j8
diff --git a/testing/mozharness/scripts/spidermonkey/build.shell b/testing/mozharness/scripts/spidermonkey/build.shell
new file mode 100755
index 000000000..7aad477ea
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey/build.shell
@@ -0,0 +1,6 @@
+#!/bin/bash -ex
+
+mkdir -p "$ANALYZED_OBJDIR"
+cd "$ANALYZED_OBJDIR"
+$SOURCE/js/src/configure --enable-debug --enable-optimize --enable-stdcxx-compat --enable-ctypes --enable-nspr-build
+make -j12 -s
diff --git a/testing/mozharness/scripts/spidermonkey_build.py b/testing/mozharness/scripts/spidermonkey_build.py
new file mode 100755
index 000000000..5522545da
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey_build.py
@@ -0,0 +1,482 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import copy
+from datetime import datetime
+from functools import wraps
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import MakefileErrorList
+from mozharness.base.script import BaseScript
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.building.hazards import HazardError, HazardAnalysis
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.tooltool import TooltoolMixin
+
+SUCCESS, WARNINGS, FAILURE, EXCEPTION, RETRY = xrange(5)
+
+
+def requires(*queries):
+ """Wrapper for detecting problems where some bit of information
+ required by the wrapped step is unavailable. Use it by decorating
+ the step with @requires(query_foo), which will check whether
+ query_foo(self) returns something useful before running the step."""
+ def make_wrapper(f):
+ @wraps(f)
+ def wrapper(self, *args, **kwargs):
+ for query in queries:
+ val = query(self)
+ goodval = not (val is None or "None" in str(val))
+ assert goodval, f.__name__ + " requires " + query.__name__ + " to return a value"
+ return f(self, *args, **kwargs)
+ return wrapper
+ return make_wrapper
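+# Illustrative usage (hypothetical step; written inside the class body after
+# query_repo is defined):
+#
+#     @requires(query_repo)
+#     def checkout_source(self):
+#         ...  # asserts query_repo(self) returned a usable value first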
+
+
+nuisance_env_vars = ['TERMCAP', 'LS_COLORS', 'PWD', '_']
+
+
+class SpidermonkeyBuild(MockMixin,
+ PurgeMixin, BaseScript,
+ VCSMixin, BuildbotMixin, TooltoolMixin, TransferMixin, BlobUploadMixin):
+ config_options = [
+ [["--repo"], {
+ "dest": "repo",
+ "help": "which gecko repo to get spidermonkey from",
+ }],
+ [["--source"], {
+ "dest": "source",
+ "help": "directory containing gecko source tree (instead of --repo)",
+ }],
+ [["--revision"], {
+ "dest": "revision",
+ }],
+ [["--branch"], {
+ "dest": "branch",
+ }],
+ [["--vcs-share-base"], {
+ "dest": "vcs_share_base",
+ "help": "base directory for shared repositories",
+ }],
+ [["-j"], {
+ "dest": "concurrency",
+ "type": int,
+ "default": 4,
+ "help": "number of simultaneous jobs used while building the shell " +
+ "(currently ignored for the analyzed build)",
+ }] + copy.deepcopy(blobupload_config_options)
+ ]
+
+ def __init__(self):
+ super(SpidermonkeyBuild, self).__init__(
+ config_options=self.config_options,
+ # other stuff
+ all_actions=[
+ 'purge',
+ 'checkout-tools',
+
+ # First, build an optimized JS shell for running the analysis
+ 'checkout-source',
+ 'get-blobs',
+ 'clobber-shell',
+ 'configure-shell',
+ 'build-shell',
+
+ # Next, build a tree with the analysis plugin active. Note that
+ # we are using the same checkout for the JS shell build and the
+ # build of the source to be analyzed, which is a little
+ # unnecessary (no need to rebuild the JS shell all the time).
+ # (Different objdir, though.)
+
+ 'clobber-analysis',
+ 'setup-analysis',
+ 'run-analysis',
+ 'collect-analysis-output',
+ 'upload-analysis',
+ 'check-expectations',
+ ],
+ default_actions=[
+ 'purge',
+ 'checkout-tools',
+ 'checkout-source',
+ 'get-blobs',
+ 'clobber-shell',
+ 'configure-shell',
+ 'build-shell',
+ 'clobber-analysis',
+ 'setup-analysis',
+ 'run-analysis',
+ 'collect-analysis-output',
+ # Temporarily disabled, see bug 1211402
+ # 'upload-analysis',
+ 'check-expectations',
+ ],
+ config={
+ 'default_vcs': 'hg',
+ 'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
+ 'ccache': True,
+ 'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
+ 'tools_repo': 'https://hg.mozilla.org/build/tools',
+
+ 'upload_ssh_server': None,
+ 'upload_remote_basepath': None,
+ 'enable_try_uploads': True,
+ 'source': None,
+ 'stage_product': 'firefox',
+ },
+ )
+
+ self.buildid = None
+ self.create_virtualenv()
+ self.analysis = HazardAnalysis()
+
+ def _pre_config_lock(self, rw_config):
+ if self.config['source']:
+ self.config['srcdir'] = self.config['source']
+ super(SpidermonkeyBuild, self)._pre_config_lock(rw_config)
+
+ if self.buildbot_config is None:
+ self.info("Reading buildbot build properties...")
+ self.read_buildbot_config()
+
+ if self.buildbot_config:
+ bb_props = [('mock_target', 'mock_target', None),
+ ('hgurl', 'hgurl', None),
+ ('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
+ ('force_clobber', 'force_clobber', None),
+ ('branch', 'blob_upload_branch', None),
+ ]
+ buildbot_props = self.buildbot_config.get('properties', {})
+ for bb_prop, cfg_prop, default in bb_props:
+ if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
+ self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
+ self.config['is_automation'] = True
+ else:
+ self.config['is_automation'] = False
+ self.config.setdefault('blob_upload_branch', 'devel')
+
+ dirs = self.query_abs_dirs()
+ replacements = self.config['env_replacements'].copy()
+ for k, v in replacements.items():
+ replacements[k] = v % dirs
+
+ self.env = self.query_env(replace_dict=replacements,
+ partial_env=self.config['partial_env'],
+ purge_env=nuisance_env_vars)
+ self.env['MOZ_UPLOAD_DIR'] = dirs['abs_blob_upload_dir']
+ self.env['TOOLTOOL_DIR'] = dirs['abs_work_dir']
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = BaseScript.query_abs_dirs(self)
+
+ abs_work_dir = abs_dirs['abs_work_dir']
+ dirs = {
+ 'shell_objdir':
+ os.path.join(abs_work_dir, self.config['shell-objdir']),
+ 'mozharness_scriptdir':
+ os.path.abspath(os.path.dirname(__file__)),
+ 'abs_analysis_dir':
+ os.path.join(abs_work_dir, self.config['analysis-dir']),
+ 'abs_analyzed_objdir':
+ os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
+ 'analysis_scriptdir':
+ os.path.join(self.config['srcdir'], self.config['analysis-scriptdir']),
+ 'abs_tools_dir':
+ os.path.join(abs_dirs['base_work_dir'], 'tools'),
+ 'gecko_src':
+ os.path.join(abs_work_dir, self.config['srcdir']),
+ 'abs_blob_upload_dir':
+ os.path.join(abs_work_dir, 'blobber_upload_dir'),
+ }
+
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def query_repo(self):
+ if self.config.get('repo'):
+ return self.config['repo']
+ elif self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.config['hgurl'] + self.buildbot_config['properties']['repo_path']
+ else:
+ return None
+
+ def query_revision(self):
+ if 'revision' in self.buildbot_properties:
+ revision = self.buildbot_properties['revision']
+ elif self.buildbot_config and 'sourcestamp' in self.buildbot_config:
+ revision = self.buildbot_config['sourcestamp']['revision']
+ else:
+ # Useful for local testing. In actual use, this would always be
+ # None.
+ revision = self.config.get('revision')
+
+ return revision
+
+ def query_branch(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['branch']
+ elif 'branch' in self.config:
+ # Used for locally testing try vs non-try
+ return self.config['branch']
+ else:
+ return os.path.basename(self.query_repo())
+
+ def query_compiler_manifest(self):
+ dirs = self.query_abs_dirs()
+ manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
+ if os.path.exists(manifest):
+ return manifest
+ return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
+
+ def query_sixgill_manifest(self):
+ dirs = self.query_abs_dirs()
+ manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
+ if os.path.exists(manifest):
+ return manifest
+ return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])
+
+ def query_buildid(self):
+ if self.buildid:
+ return self.buildid
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ self.buildid = self.buildbot_config['properties'].get('buildid')
+ if not self.buildid:
+ self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
+ return self.buildid
+
+ def query_upload_ssh_server(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['upload_ssh_server']
+ else:
+ return self.config['upload_ssh_server']
+
+ def query_upload_ssh_key(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ key = self.buildbot_config['properties']['upload_ssh_key']
+ else:
+ key = self.config['upload_ssh_key']
+ if self.mock_enabled and not key.startswith("/"):
+ key = "/home/mock_mozilla/.ssh/" + key
+ return key
+
+ def query_upload_ssh_user(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['upload_ssh_user']
+ else:
+ return self.config['upload_ssh_user']
+
+ def query_product(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['product']
+ else:
+ return self.config['product']
+
+ def query_upload_remote_basepath(self):
+ if self.config.get('upload_remote_basepath'):
+ return self.config['upload_remote_basepath']
+ else:
+ return "/pub/mozilla.org/{product}".format(
+ product=self.query_product(),
+ )
+
+ def query_upload_remote_baseuri(self):
+ baseuri = self.config.get('upload_remote_baseuri')
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ buildprops = self.buildbot_config['properties']
+ if 'upload_remote_baseuri' in buildprops:
+ baseuri = buildprops['upload_remote_baseuri']
+ return baseuri.strip("/") if baseuri else None
+
+ def query_target(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['platform']
+ else:
+ return self.config.get('target')
+
+ def query_upload_path(self):
+ branch = self.query_branch()
+
+ common = {
+ 'basepath': self.query_upload_remote_basepath(),
+ 'branch': branch,
+ 'target': self.query_target(),
+ }
+
+ if branch == 'try':
+ if not self.config['enable_try_uploads']:
+ return None
+ try:
+ user = self.buildbot_config['sourcestamp']['changes'][0]['who']
+ except (KeyError, TypeError):
+ user = "unknown"
+ return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
+ user=user,
+ rev=self.query_revision(),
+ **common
+ )
+ else:
+ return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
+ buildid=self.query_buildid(),
+ **common
+ )
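+ # Illustrative (hypothetical values): a try push by "dev@example.com" at
+ # rev "abc123" uploads to
+ # {basepath}/try-builds/dev@example.com-abc123/try-{target}, while other
+ # branches upload to {basepath}/tinderbox-builds/{branch}-{target}/{buildid}.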
+
+ def query_do_upload(self):
+ if self.query_branch() == 'try':
+ return self.config.get('enable_try_uploads')
+ return True
+
+ # Actions {{{2
+ def purge(self):
+ dirs = self.query_abs_dirs()
+ self.info("purging, abs_upload_dir=" + dirs['abs_upload_dir'])
+ PurgeMixin.clobber(
+ self,
+ always_clobber_dirs=[
+ dirs['abs_upload_dir'],
+ ],
+ )
+
+ def checkout_tools(self):
+ dirs = self.query_abs_dirs()
+
+ # If running from within a directory also passed as the --source dir,
+ # this has the danger of clobbering <source>/tools/
+ if self.config['source']:
+ srcdir = self.config['source']
+ if os.path.samefile(srcdir, os.path.dirname(dirs['abs_tools_dir'])):
+ raise Exception("Cannot run from source checkout to avoid overwriting subdirs")
+
+ rev = self.vcs_checkout(
+ vcs='hg',
+ branch="default",
+ repo=self.config['tools_repo'],
+ clean=False,
+ dest=dirs['abs_tools_dir'],
+ )
+ self.set_buildbot_property("tools_revision", rev, write_to_file=True)
+
+ def do_checkout_source(self):
+ # --source option means to use an existing source directory instead of checking one out.
+ if self.config['source']:
+ return
+
+ dirs = self.query_abs_dirs()
+ dest = dirs['gecko_src']
+
+ # Pre-create the directory to appease the share extension
+ if not os.path.exists(dest):
+ self.mkdir_p(dest)
+
+ rev = self.vcs_checkout(
+ repo=self.query_repo(),
+ dest=dest,
+ revision=self.query_revision(),
+ branch=self.config.get('branch'),
+ clean=True,
+ )
+ self.set_buildbot_property('source_revision', rev, write_to_file=True)
+
+ def checkout_source(self):
+ try:
+ self.do_checkout_source()
+ except Exception as e:
+ self.fatal("checkout failed: " + str(e), exit_code=RETRY)
+
+ def get_blobs(self):
+ work_dir = self.query_abs_dirs()['abs_work_dir']
+ if not os.path.exists(work_dir):
+ self.mkdir_p(work_dir)
+ self.tooltool_fetch(self.query_compiler_manifest(), output_dir=work_dir)
+ self.tooltool_fetch(self.query_sixgill_manifest(), output_dir=work_dir)
+
+ def clobber_shell(self):
+ self.analysis.clobber_shell(self)
+
+ def configure_shell(self):
+ self.enable_mock()
+
+ try:
+ self.analysis.configure_shell(self)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+ self.disable_mock()
+
+ def build_shell(self):
+ self.enable_mock()
+
+ try:
+ self.analysis.build_shell(self)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+ self.disable_mock()
+
+ def clobber_analysis(self):
+ self.analysis.clobber(self)
+
+ def setup_analysis(self):
+ self.analysis.setup(self)
+
+ def run_analysis(self):
+ self.enable_mock()
+
+ upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.exists(upload_dir):
+ self.mkdir_p(upload_dir)
+
+ env = self.env.copy()
+ env['MOZ_UPLOAD_DIR'] = upload_dir
+
+ try:
+ self.analysis.run(self, env=env, error_list=MakefileErrorList)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+ self.disable_mock()
+
+ def collect_analysis_output(self):
+ self.analysis.collect_output(self)
+
+ def upload_analysis(self):
+ if not self.config['is_automation']:
+ return
+
+ if not self.query_do_upload():
+ self.info("Uploads disabled for this build. Skipping...")
+ return
+
+ self.enable_mock()
+
+ try:
+ self.analysis.upload_results(self)
+ except HazardError as e:
+ self.error(e)
+ self.return_code = WARNINGS
+
+ self.disable_mock()
+
+ def check_expectations(self):
+ try:
+ self.analysis.check_expectations(self)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+
+# main {{{1
+if __name__ == '__main__':
+ myScript = SpidermonkeyBuild()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/talos_script.py b/testing/mozharness/scripts/talos_script.py
new file mode 100755
index 000000000..dc4161193
--- /dev/null
+++ b/testing/mozharness/scripts/talos_script.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""talos
+
+"""
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.talos import Talos
+
+if __name__ == '__main__':
+ talos = Talos()
+ talos.run_and_exit()
diff --git a/testing/mozharness/scripts/web_platform_tests.py b/testing/mozharness/scripts/web_platform_tests.py
new file mode 100755
index 000000000..7cd0e3842
--- /dev/null
+++ b/testing/mozharness/scripts/web_platform_tests.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import copy
+import glob
+import json
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import PreScriptAction
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options, TOOLTOOL_PLATFORM_DIR
+
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.base.log import INFO
+
+class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin):
+ config_options = [
+ [['--test-type'], {
+ "action": "extend",
+ "dest": "test_type",
+ "help": "Specify the test types to run."}
+ ],
+ [['--e10s'], {
+ "action": "store_true",
+ "dest": "e10s",
+ "default": False,
+ "help": "Run with e10s enabled"}
+ ],
+ [["--total-chunks"], {
+ "action": "store",
+ "dest": "total_chunks",
+ "help": "Number of total chunks"}
+ ],
+ [["--this-chunk"], {
+ "action": "store",
+ "dest": "this_chunk",
+ "help": "Number of this chunk"}
+ ],
+ [["--allow-software-gl-layers"], {
+ "action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}]
+ ] + copy.deepcopy(testing_config_options) + \
+ copy.deepcopy(blobupload_config_options)
+
+ def __init__(self, require_config_file=True):
+ super(WebPlatformTest, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'fetch-geckodriver',
+ 'create-virtualenv',
+ 'pull',
+ 'install',
+ 'run-tests',
+ ],
+ require_config_file=require_config_file,
+ config={'require_test_zip': True})
+
+ # Surely this should be in the superclass
+ c = self.config
+ self.installer_url = c.get('installer_url')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.abs_app_dir = None
+ self.geckodriver_path = None
+
+ def query_abs_app_dir(self):
+ """We can't set this in advance, because OSX install directories
+ change depending on branding and opt/debug.
+ """
+ if self.abs_app_dir:
+ return self.abs_app_dir
+ if not self.binary_path:
+ self.fatal("Can't determine abs_app_dir (binary_path not set!)")
+ self.abs_app_dir = os.path.dirname(self.binary_path)
+ return self.abs_app_dir
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(WebPlatformTest, self).query_abs_dirs()
+
+ dirs = {}
+ dirs['abs_app_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'application')
+ dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
+ dirs["abs_wpttest_dir"] = os.path.join(dirs['abs_test_install_dir'], "web-platform")
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'marionette_requirements.txt')
+
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+
+ def _query_cmd(self):
+ if not self.binary_path:
+ self.fatal("Binary path could not be determined")
+            # fatal() exits the script here
+
+ c = self.config
+ dirs = self.query_abs_dirs()
+ abs_app_dir = self.query_abs_app_dir()
+ run_file_name = "runtests.py"
+
+ cmd = [self.query_python_path('python'), '-u']
+ cmd.append(os.path.join(dirs["abs_wpttest_dir"], run_file_name))
+
+ # Make sure that the logging directory exists
+ if self.mkdir_p(dirs["abs_blob_upload_dir"]) == -1:
+ self.fatal("Could not create blobber upload directory")
+            # fatal() exits the script here
+
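+        # Stream the raw structured log to stdout while also archiving the
+        # raw log and an error summary in the blobber upload directory.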
+ cmd += ["--log-raw=-",
+ "--log-raw=%s" % os.path.join(dirs["abs_blob_upload_dir"],
+ "wpt_raw.log"),
+ "--log-errorsummary=%s" % os.path.join(dirs["abs_blob_upload_dir"],
+ "wpt_errorsummary.log"),
+ "--binary=%s" % self.binary_path,
+ "--symbols-path=%s" % self.query_symbols_url(),
+ "--stackwalk-binary=%s" % self.query_minidump_stackwalk(),
+ "--stackfix-dir=%s" % os.path.join(dirs["abs_test_install_dir"], "bin")]
+
+ for test_type in c.get("test_type", []):
+ cmd.append("--test-type=%s" % test_type)
+
+ if not c["e10s"]:
+ cmd.append("--disable-e10s")
+
+ for opt in ["total_chunks", "this_chunk"]:
+ val = c.get(opt)
+ if val:
+ cmd.append("--%s=%s" % (opt.replace("_", "-"), val))
+
+ if "wdspec" in c.get("test_type", []):
+ assert self.geckodriver_path is not None
+ cmd.append("--webdriver-binary=%s" % self.geckodriver_path)
+
+ options = list(c.get("options", []))
+
+ str_format_values = {
+ 'binary_path': self.binary_path,
+ 'test_path': dirs["abs_wpttest_dir"],
+ 'test_install_path': dirs["abs_test_install_dir"],
+ 'abs_app_dir': abs_app_dir,
+ 'abs_work_dir': dirs["abs_work_dir"]
+ }
+
+ try_options, try_tests = self.try_args("web-platform-tests")
+
+ cmd.extend(self.query_options(options,
+ try_options,
+ str_format_values=str_format_values))
+ cmd.extend(self.query_tests_args(try_tests,
+ str_format_values=str_format_values))
+
+ return cmd
+
+ def download_and_extract(self):
+ super(WebPlatformTest, self).download_and_extract(
+ extract_dirs=["bin/*",
+ "config/*",
+ "mozbase/*",
+ "marionette/*",
+ "tools/wptserve/*",
+ "web-platform/*"],
+ suite_categories=["web-platform"])
+
+ def fetch_geckodriver(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ platform_name = self.platform_name()
+
+ if "wdspec" not in c.get("test_type", []):
+ return
+
+ if platform_name != "linux64":
+ self.fatal("Don't have a geckodriver for %s" % platform_name)
+
+ tooltool_path = os.path.join(dirs["abs_test_install_dir"],
+ "config",
+ "tooltool-manifests",
+ TOOLTOOL_PLATFORM_DIR[platform_name],
+ "geckodriver.manifest")
+
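+        # Read the manifest up front to learn the archive's filename, which
+        # is needed below to locate and unpack the fetched file.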
+ with open(tooltool_path) as f:
+ manifest = json.load(f)
+
+ assert len(manifest) == 1
+ geckodriver_filename = manifest[0]["filename"]
+ assert geckodriver_filename.endswith(".tar.gz")
+
+ self.tooltool_fetch(
+ manifest=tooltool_path,
+ output_dir=dirs['abs_work_dir'],
+ cache=c.get('tooltool_cache')
+ )
+
+ compressed_path = os.path.join(dirs['abs_work_dir'], geckodriver_filename)
+ tar = self.query_exe('tar', return_type="list")
+ self.run_command(tar + ["xf", compressed_path], cwd=dirs['abs_work_dir'],
+ halt_on_failure=True, fatal_exit_code=3)
+ self.geckodriver_path = os.path.join(dirs['abs_work_dir'], "geckodriver")
+
+ def run_tests(self):
+ dirs = self.query_abs_dirs()
+ cmd = self._query_cmd()
+
+ parser = StructuredOutputParser(config=self.config,
+ log_obj=self.log_obj,
+ log_compact=True)
+
+ env = {'MINIDUMP_SAVE_PATH': dirs['abs_blob_upload_dir']}
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ env = self.query_env(partial_env=env, log_level=INFO)
+
+ return_code = self.run_command(cmd,
+ cwd=dirs['abs_work_dir'],
+ output_timeout=1000,
+ output_parser=parser,
+ env=env)
+
+ tbpl_status, log_level = parser.evaluate_parser(return_code)
+
+ self.buildbot_status(tbpl_status, level=log_level)
+
+
+# main {{{1
+if __name__ == '__main__':
+ web_platform_tests = WebPlatformTest()
+ web_platform_tests.run_and_exit()
diff --git a/testing/mozharness/setup.cfg b/testing/mozharness/setup.cfg
new file mode 100644
index 000000000..d8057aec1
--- /dev/null
+++ b/testing/mozharness/setup.cfg
@@ -0,0 +1,2 @@
+[nosetests]
+exclude=TestingMixin
diff --git a/testing/mozharness/setup.py b/testing/mozharness/setup.py
new file mode 100644
index 000000000..5bcb36d63
--- /dev/null
+++ b/testing/mozharness/setup.py
@@ -0,0 +1,35 @@
+import os
+from setuptools import setup, find_packages
+
+try:
+ here = os.path.dirname(os.path.abspath(__file__))
+ description = open(os.path.join(here, 'README.txt')).read()
+except IOError:
+ description = ''
+
+import mozharness
+version = mozharness.version_string
+
+dependencies = ['virtualenv', 'mock', "coverage", "nose", "pylint", "pyflakes"]
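+# json is in the standard library from Python 2.6 on; fall back to
+# simplejson on older interpreters.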
+try:
+ import json
+except ImportError:
+ dependencies.append('simplejson')
+
+setup(name='mozharness',
+ version=version,
+ description="Mozharness is a configuration-driven script harness with full logging that allows production infrastructure and individual developers to use the same scripts. ",
+ long_description=description,
+ classifiers=[], # Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
+ author='Aki Sasaki',
+ author_email='aki@mozilla.com',
+ url='https://hg.mozilla.org/build/mozharness/',
+ license='MPL',
+ packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=dependencies,
+ entry_points="""
+ # -*- Entry points: -*-
+ """,
+ )
diff --git a/testing/mozharness/test/README b/testing/mozharness/test/README
new file mode 100644
index 000000000..889c8a83d
--- /dev/null
+++ b/testing/mozharness/test/README
@@ -0,0 +1,2 @@
+test/ : non-network-dependent unit tests
+test/networked/ : network-dependent unit tests.
diff --git a/testing/mozharness/test/helper_files/.noserc b/testing/mozharness/test/helper_files/.noserc
new file mode 100644
index 000000000..e6f21cf31
--- /dev/null
+++ b/testing/mozharness/test/helper_files/.noserc
@@ -0,0 +1,2 @@
+[nosetests]
+with-xunit=1
diff --git a/testing/mozharness/test/helper_files/archives/archive.tar b/testing/mozharness/test/helper_files/archives/archive.tar
new file mode 100644
index 000000000..1dc094198
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/archive.tar
Binary files differ
diff --git a/testing/mozharness/test/helper_files/archives/archive.tar.bz2 b/testing/mozharness/test/helper_files/archives/archive.tar.bz2
new file mode 100644
index 000000000..c393ea4b8
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/archive.tar.bz2
Binary files differ
diff --git a/testing/mozharness/test/helper_files/archives/archive.tar.gz b/testing/mozharness/test/helper_files/archives/archive.tar.gz
new file mode 100644
index 000000000..0fbfa39b1
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/archive.tar.gz
Binary files differ
diff --git a/testing/mozharness/test/helper_files/archives/archive.zip b/testing/mozharness/test/helper_files/archives/archive.zip
new file mode 100644
index 000000000..aa2fb34c1
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/archive.zip
Binary files differ
diff --git a/testing/mozharness/test/helper_files/archives/archive_invalid_filename.zip b/testing/mozharness/test/helper_files/archives/archive_invalid_filename.zip
new file mode 100644
index 000000000..20bdc5acd
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/archive_invalid_filename.zip
Binary files differ
diff --git a/testing/mozharness/test/helper_files/archives/reference/bin/script.sh b/testing/mozharness/test/helper_files/archives/reference/bin/script.sh
new file mode 100755
index 000000000..134f2933c
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/reference/bin/script.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo Hello world!
diff --git a/testing/mozharness/test/helper_files/archives/reference/lorem.txt b/testing/mozharness/test/helper_files/archives/reference/lorem.txt
new file mode 100644
index 000000000..d2cf010d3
--- /dev/null
+++ b/testing/mozharness/test/helper_files/archives/reference/lorem.txt
@@ -0,0 +1 @@
+Lorem ipsum dolor sit amet.
diff --git a/testing/mozharness/test/helper_files/create_archives.sh b/testing/mozharness/test/helper_files/create_archives.sh
new file mode 100755
index 000000000..314b55d27
--- /dev/null
+++ b/testing/mozharness/test/helper_files/create_archives.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+# Script to auto-generate the different archive types under the archives directory.
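+# Run it from the helper_files directory; the archives are rebuilt from the
+# contents of archives/reference.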
+
+cd archives
+
+rm archive.*
+
+tar cf archive.tar -C reference .
+gzip -fk archive.tar
+bzip2 -fk archive.tar
+cd reference && zip ../archive.zip -r * && cd ..
diff --git a/testing/mozharness/test/helper_files/init_hgrepo.sh b/testing/mozharness/test/helper_files/init_hgrepo.sh
new file mode 100755
index 000000000..c978ebe73
--- /dev/null
+++ b/testing/mozharness/test/helper_files/init_hgrepo.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Set up an hg repo for testing
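+# Usage: init_hgrepo.sh <dest>
+# Creates two branches (default and branch2) with a small history on each.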
+dest=$1
+if [ -z "$dest" ]; then
+ echo You must specify a destination directory 1>&2
+ exit 1
+fi
+
+rm -rf "$dest"
+mkdir "$dest"
+cd "$dest"
+hg init
+
+echo "Hello world $RANDOM" > hello.txt
+hg add hello.txt
+hg commit -m "Adding hello"
+
+hg branch branch2 > /dev/null
+echo "So long, farewell" >> hello.txt
+hg commit -m "Changing hello on branch"
+
+hg checkout default
+echo "Is this thing on?" >> hello.txt
+hg commit -m "Last change on default"
diff --git a/testing/mozharness/test/helper_files/locales.json b/testing/mozharness/test/helper_files/locales.json
new file mode 100644
index 000000000..c9056b1d1
--- /dev/null
+++ b/testing/mozharness/test/helper_files/locales.json
@@ -0,0 +1,18 @@
+{
+ "ar": {
+ "revision": "default",
+ "platforms": ["maemo"]
+ },
+ "be": {
+ "revision": "default",
+ "platforms": ["maemo"]
+ },
+ "de": {
+ "revision": "default",
+ "platforms": ["maemo", "maemo-multilocale", "android-multilocale"]
+ },
+ "es-ES": {
+ "revision": "default",
+ "platforms": ["maemo", "maemo-multilocale", "android-multilocale"]
+ }
+}
diff --git a/testing/mozharness/test/helper_files/locales.txt b/testing/mozharness/test/helper_files/locales.txt
new file mode 100644
index 000000000..0b65ab76d
--- /dev/null
+++ b/testing/mozharness/test/helper_files/locales.txt
@@ -0,0 +1,4 @@
+ar
+be
+de
+es-ES
diff --git a/testing/mozharness/test/hgrc b/testing/mozharness/test/hgrc
new file mode 100644
index 000000000..85e670518
--- /dev/null
+++ b/testing/mozharness/test/hgrc
@@ -0,0 +1,9 @@
+[extensions]
+mq =
+purge =
+rebase =
+share =
+transplant =
+
+[ui]
+username = tester <tester@example.com>
diff --git a/testing/mozharness/test/pip-freeze.example.txt b/testing/mozharness/test/pip-freeze.example.txt
new file mode 100644
index 000000000..56e06923f
--- /dev/null
+++ b/testing/mozharness/test/pip-freeze.example.txt
@@ -0,0 +1,19 @@
+MakeItSo==0.2.6
+PyYAML==3.10
+Tempita==0.5.1
+WebOb==1.2b3
+-e hg+http://k0s.org/mozilla/hg/configuration@35416ad140982c11eba0a2d6b96d683f53429e94#egg=configuration-dev
+coverage==3.5.1
+-e hg+http://k0s.org/mozilla/hg/jetperf@4645ae34d2c41a353dcdbd856b486b6d3faabb99#egg=jetperf-dev
+logilab-astng==0.23.1
+logilab-common==0.57.1
+mozdevice==0.2
+-e hg+https://hg.mozilla.org/build/mozharness@df6b7f1e14d8c472125ef7a77b8a3b40c96ae181#egg=mozharness-jetperf
+mozhttpd==0.3
+mozinfo==0.3.3
+nose==1.1.2
+pyflakes==0.5.0
+pylint==0.25.1
+-e hg+https://hg.mozilla.org/build/talos@ee5c0b090d808e81a8fc5ba5f96b012797b3e785#egg=talos-dev
+virtualenv==1.7.1.2
+wsgiref==0.1.2
diff --git a/testing/mozharness/test/test_base_config.py b/testing/mozharness/test/test_base_config.py
new file mode 100644
index 000000000..42ec7a641
--- /dev/null
+++ b/testing/mozharness/test/test_base_config.py
@@ -0,0 +1,308 @@
+import os
+import unittest
+
+JSON_TYPE = None
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+ JSON_TYPE = 'json'
+else:
+ JSON_TYPE = 'simplejson'
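+# Remember which implementation was imported: simplejson raises its own
+# JSONDecodeError, while the stdlib json raises ValueError on malformed
+# input (see test_malformed_json below).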
+
+import mozharness.base.config as config
+from copy import deepcopy
+
+MH_DIR = os.path.dirname(os.path.dirname(__file__))
+
+
+class TestParseConfigFile(unittest.TestCase):
+ def _get_json_config(self, filename=os.path.join(MH_DIR, "configs", "test", "test.json"),
+ output='dict'):
+ fh = open(filename)
+ contents = json.load(fh)
+ fh.close()
+        if output == 'dict':
+ return dict(contents)
+ else:
+ return contents
+
+ def _get_python_config(self, filename=os.path.join(MH_DIR, "configs", "test", "test.py"),
+ output='dict'):
+ global_dict = {}
+ local_dict = {}
+ execfile(filename, global_dict, local_dict)
+ return local_dict['config']
+
+ def test_json_config(self):
+ c = config.BaseConfig(initial_config_file='test/test.json')
+ content_dict = self._get_json_config()
+ for key in content_dict.keys():
+ self.assertEqual(content_dict[key], c._config[key])
+
+ def test_python_config(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ config_dict = self._get_python_config()
+ for key in config_dict.keys():
+ self.assertEqual(config_dict[key], c._config[key])
+
+ def test_illegal_config(self):
+ self.assertRaises(IOError, config.parse_config_file, "this_file_does_not_exist.py", search_path="yadda")
+
+ def test_illegal_suffix(self):
+ self.assertRaises(RuntimeError, config.parse_config_file, "test/test.illegal_suffix")
+
+ def test_malformed_json(self):
+ if JSON_TYPE == 'simplejson':
+ self.assertRaises(json.decoder.JSONDecodeError, config.parse_config_file, "test/test_malformed.json")
+ else:
+ self.assertRaises(ValueError, config.parse_config_file, "test/test_malformed.json")
+
+ def test_malformed_python(self):
+ self.assertRaises(SyntaxError, config.parse_config_file, "test/test_malformed.py")
+
+ def test_multiple_config_files_override_string(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py'])
+ self.assertEqual(c._config['override_string'], 'yay')
+
+ def test_multiple_config_files_override_list(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py'])
+ self.assertEqual(c._config['override_list'], ['yay', 'worked'])
+
+ def test_multiple_config_files_override_dict(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py'])
+ self.assertEqual(c._config['override_dict'], {'yay': 'worked'})
+
+ def test_multiple_config_files_keep_string(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py'])
+ self.assertEqual(c._config['keep_string'], "don't change me")
+
+ def test_optional_config_files_override_value(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py',
+ '--opt-cfg', 'test/test_optional.py'])
+ self.assertEqual(c._config['opt_override'], "new stuff")
+
+ def test_optional_config_files_missing_config(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py',
+ '--opt-cfg', 'test/test_optional.py,does_not_exist.py'])
+ self.assertEqual(c._config['opt_override'], "new stuff")
+
+ def test_optional_config_files_keep_string(self):
+ c = config.BaseConfig(initial_config_file='test/test.py')
+ c.parse_args(['--cfg', 'test/test_override.py,test/test_override2.py',
+ '--opt-cfg', 'test/test_optional.py'])
+ self.assertEqual(c._config['keep_string'], "don't change me")
+
+
+class TestReadOnlyDict(unittest.TestCase):
+ control_dict = {
+ 'b': '2',
+ 'c': {'d': '4'},
+        'e': ['f', 'g', {'turtles': ['turtle1']}],
+ 'd': {
+ 'turtles': ['turtle1']
+ }
+ }
+
+ def get_unlocked_ROD(self):
+ r = config.ReadOnlyDict(self.control_dict)
+ return r
+
+ def get_locked_ROD(self):
+ r = config.ReadOnlyDict(self.control_dict)
+ r.lock()
+ return r
+
+ def test_create_ROD(self):
+ r = self.get_unlocked_ROD()
+ self.assertEqual(r, self.control_dict,
+ msg="can't transfer dict to ReadOnlyDict")
+
+ def test_pop_item(self):
+ r = self.get_unlocked_ROD()
+ r.popitem()
+ self.assertEqual(len(r), len(self.control_dict) - 1,
+ msg="can't popitem() ReadOnlyDict when unlocked")
+
+ def test_pop(self):
+ r = self.get_unlocked_ROD()
+ r.pop('e')
+ self.assertEqual(len(r), len(self.control_dict) - 1,
+ msg="can't pop() ReadOnlyDict when unlocked")
+
+ def test_set(self):
+ r = self.get_unlocked_ROD()
+ r['e'] = 'yarrr'
+ self.assertEqual(r['e'], 'yarrr',
+ msg="can't set var in ReadOnlyDict when unlocked")
+
+ def test_del(self):
+ r = self.get_unlocked_ROD()
+ del r['e']
+ self.assertEqual(len(r), len(self.control_dict) - 1,
+ msg="can't del in ReadOnlyDict when unlocked")
+
+ def test_clear(self):
+ r = self.get_unlocked_ROD()
+ r.clear()
+ self.assertEqual(r, {},
+ msg="can't clear() ReadOnlyDict when unlocked")
+
+ def test_set_default(self):
+ r = self.get_unlocked_ROD()
+ for key in self.control_dict.keys():
+ r.setdefault(key, self.control_dict[key])
+ self.assertEqual(r, self.control_dict,
+ msg="can't setdefault() ReadOnlyDict when unlocked")
+
+ def test_locked_set(self):
+ r = self.get_locked_ROD()
+ # TODO use |with self.assertRaises(AssertionError):| if/when we're
+ # all on 2.7.
+ try:
+ r['e'] = 2
+ except:
+ pass
+ else:
+ self.assertEqual(0, 1, msg="can set r['e'] when locked")
+
+ def test_locked_del(self):
+ r = self.get_locked_ROD()
+ try:
+ del r['e']
+ except:
+ pass
+ else:
+ self.assertEqual(0, 1, "can del r['e'] when locked")
+
+ def test_locked_popitem(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r.popitem)
+
+ def test_locked_update(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r.update, {})
+
+ def test_locked_set_default(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r.setdefault, {})
+
+ def test_locked_pop(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r.pop)
+
+ def test_locked_clear(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r.clear)
+
+ def test_locked_second_level_dict_pop(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r['c'].update, {})
+
+ def test_locked_second_level_list_pop(self):
+ r = self.get_locked_ROD()
+ with self.assertRaises(AttributeError):
+ r['e'].pop()
+
+ def test_locked_third_level_mutate(self):
+ r = self.get_locked_ROD()
+ with self.assertRaises(AttributeError):
+ r['d']['turtles'].append('turtle2')
+
+ def test_locked_object_in_tuple_mutate(self):
+ r = self.get_locked_ROD()
+ with self.assertRaises(AttributeError):
+ r['e'][2]['turtles'].append('turtle2')
+
+ def test_locked_second_level_dict_pop2(self):
+ r = self.get_locked_ROD()
+ self.assertRaises(AssertionError, r['c'].update, {})
+
+ def test_locked_second_level_list_pop2(self):
+ r = self.get_locked_ROD()
+ with self.assertRaises(AttributeError):
+ r['e'].pop()
+
+ def test_locked_third_level_mutate2(self):
+ r = self.get_locked_ROD()
+ with self.assertRaises(AttributeError):
+ r['d']['turtles'].append('turtle2')
+
+ def test_locked_object_in_tuple_mutate2(self):
+ r = self.get_locked_ROD()
+ with self.assertRaises(AttributeError):
+ r['e'][2]['turtles'].append('turtle2')
+
+ def test_locked_deepcopy_set(self):
+ r = self.get_locked_ROD()
+ c = deepcopy(r)
+ c['e'] = 'hey'
+ self.assertEqual(c['e'], 'hey', "can't set var in ROD after deepcopy")
+
+
+class TestActions(unittest.TestCase):
+ all_actions = ['a', 'b', 'c', 'd', 'e']
+ default_actions = ['b', 'c', 'd']
+
+ def test_verify_actions(self):
+ c = config.BaseConfig(initial_config_file='test/test.json')
+ try:
+ c.verify_actions(['not_a_real_action'])
+ except:
+ pass
+ else:
+ self.assertEqual(0, 1, msg="verify_actions() didn't die on invalid action")
+ c = config.BaseConfig(initial_config_file='test/test.json')
+ returned_actions = c.verify_actions(c.all_actions)
+ self.assertEqual(c.all_actions, returned_actions,
+ msg="returned actions from verify_actions() changed")
+
+ def test_default_actions(self):
+ c = config.BaseConfig(default_actions=self.default_actions,
+ all_actions=self.all_actions,
+ initial_config_file='test/test.json')
+ self.assertEqual(self.default_actions, c.get_actions(),
+ msg="default_actions broken")
+
+ def test_no_action1(self):
+ c = config.BaseConfig(default_actions=self.default_actions,
+ all_actions=self.all_actions,
+ initial_config_file='test/test.json')
+ c.parse_args(args=['foo', '--no-action', 'a'])
+ self.assertEqual(self.default_actions, c.get_actions(),
+ msg="--no-ACTION broken")
+
+ def test_no_action2(self):
+ c = config.BaseConfig(default_actions=self.default_actions,
+ all_actions=self.all_actions,
+ initial_config_file='test/test.json')
+ c.parse_args(args=['foo', '--no-c'])
+ self.assertEqual(['b', 'd'], c.get_actions(),
+ msg="--no-ACTION broken")
+
+ def test_add_action(self):
+ c = config.BaseConfig(default_actions=self.default_actions,
+ all_actions=self.all_actions,
+ initial_config_file='test/test.json')
+ c.parse_args(args=['foo', '--add-action', 'e'])
+ self.assertEqual(['b', 'c', 'd', 'e'], c.get_actions(),
+ msg="--add-action ACTION broken")
+
+ def test_only_action(self):
+ c = config.BaseConfig(default_actions=self.default_actions,
+ all_actions=self.all_actions,
+ initial_config_file='test/test.json')
+ c.parse_args(args=['foo', '--a', '--e'])
+ self.assertEqual(['a', 'e'], c.get_actions(),
+ msg="--ACTION broken")
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozharness/test/test_base_diskutils.py b/testing/mozharness/test/test_base_diskutils.py
new file mode 100644
index 000000000..79d36692f
--- /dev/null
+++ b/testing/mozharness/test/test_base_diskutils.py
@@ -0,0 +1,84 @@
+import mock
+import unittest
+from mozharness.base.diskutils import convert_to, DiskutilsError, DiskSize, DiskInfo
+
+
+class TestDiskutils(unittest.TestCase):
+ def test_convert_to(self):
+        # 0 is 0 regardless of from_unit/to_unit
+ self.assertTrue(convert_to(size=0, from_unit='GB', to_unit='MB') == 0)
+ size = 524288 # 512 * 1024
+ # converting from/to same unit
+ self.assertTrue(convert_to(size=size, from_unit='MB', to_unit='MB') == size)
+
+ self.assertTrue(convert_to(size=size, from_unit='MB', to_unit='GB') == 512)
+
+ self.assertRaises(DiskutilsError,
+ lambda: convert_to(size='a string', from_unit='MB', to_unit='MB'))
+ self.assertRaises(DiskutilsError,
+ lambda: convert_to(size=0, from_unit='foo', to_unit='MB'))
+ self.assertRaises(DiskutilsError,
+ lambda: convert_to(size=0, from_unit='MB', to_unit='foo'))
+
+
+class TestDiskInfo(unittest.TestCase):
+
+ def testDiskinfo_to(self):
+ di = DiskInfo()
+ self.assertTrue(di.unit == 'bytes')
+ self.assertTrue(di.free == 0)
+ self.assertTrue(di.used == 0)
+ self.assertTrue(di.total == 0)
+ # convert to GB
+ di._to('GB')
+ self.assertTrue(di.unit == 'GB')
+ self.assertTrue(di.free == 0)
+ self.assertTrue(di.used == 0)
+ self.assertTrue(di.total == 0)
+
+
+class MockStatvfs(object):
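+    # An all-zero statvfs result; DiskSize._posix_size should report
+    # free/used/total of 0 bytes from it.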
+ def __init__(self):
+ self.f_bsize = 0
+ self.f_frsize = 0
+ self.f_blocks = 0
+ self.f_bfree = 0
+ self.f_bavail = 0
+ self.f_files = 0
+ self.f_ffree = 0
+ self.f_favail = 0
+ self.f_flag = 0
+ self.f_namemax = 0
+
+
+class TestDiskSpace(unittest.TestCase):
+
+ @mock.patch('mozharness.base.diskutils.os')
+ def testDiskSpacePosix(self, mock_os):
+ ds = MockStatvfs()
+ mock_os.statvfs.return_value = ds
+ di = DiskSize()._posix_size('/')
+ self.assertTrue(di.unit == 'bytes')
+ self.assertTrue(di.free == 0)
+ self.assertTrue(di.used == 0)
+ self.assertTrue(di.total == 0)
+
+ @mock.patch('mozharness.base.diskutils.ctypes')
+ def testDiskSpaceWindows(self, mock_ctypes):
+ mock_ctypes.windll.kernel32.GetDiskFreeSpaceExA.return_value = 0
+ mock_ctypes.windll.kernel32.GetDiskFreeSpaceExW.return_value = 0
+ di = DiskSize()._windows_size('/c/')
+ self.assertTrue(di.unit == 'bytes')
+ self.assertTrue(di.free == 0)
+ self.assertTrue(di.used == 0)
+ self.assertTrue(di.total == 0)
+
+ @mock.patch('mozharness.base.diskutils.os')
+ @mock.patch('mozharness.base.diskutils.ctypes')
+    def testUnsupportedPlatform(self, mock_ctypes, mock_os):
+ mock_os.statvfs.side_effect = AttributeError('')
+ self.assertRaises(AttributeError, lambda: DiskSize()._posix_size('/'))
+ mock_ctypes.windll.kernel32.GetDiskFreeSpaceExW.side_effect = AttributeError('')
+ mock_ctypes.windll.kernel32.GetDiskFreeSpaceExA.side_effect = AttributeError('')
+ self.assertRaises(AttributeError, lambda: DiskSize()._windows_size('/'))
+ self.assertRaises(DiskutilsError, lambda: DiskSize().get_size(path='/', unit='GB'))
diff --git a/testing/mozharness/test/test_base_log.py b/testing/mozharness/test/test_base_log.py
new file mode 100644
index 000000000..0947834f7
--- /dev/null
+++ b/testing/mozharness/test/test_base_log.py
@@ -0,0 +1,42 @@
+import os
+import shutil
+import subprocess
+import unittest
+
+import mozharness.base.log as log
+
+tmp_dir = "test_log_dir"
+log_name = "test"
+
+
+def clean_log_dir():
+ if os.path.exists(tmp_dir):
+ shutil.rmtree(tmp_dir)
+
+
+def get_log_file_path(level=None):
+ if level:
+ return os.path.join(tmp_dir, "%s_%s.log" % (log_name, level))
+ return os.path.join(tmp_dir, "%s.log" % log_name)
+
+
+class TestLog(unittest.TestCase):
+ def setUp(self):
+ clean_log_dir()
+
+ def tearDown(self):
+ clean_log_dir()
+
+ def test_log_dir(self):
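+        # Occupy the log_dir path with a plain file; the logger is expected
+        # to replace it with a real directory and still create its logfile.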
+ fh = open(tmp_dir, 'w')
+ fh.write("foo")
+ fh.close()
+ l = log.SimpleFileLogger(log_dir=tmp_dir, log_name=log_name,
+ log_to_console=False)
+ self.assertTrue(os.path.exists(tmp_dir))
+ l.log_message('blah')
+ self.assertTrue(os.path.exists(get_log_file_path()))
+ del(l)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozharness/test/test_base_parallel.py b/testing/mozharness/test/test_base_parallel.py
new file mode 100644
index 000000000..8302be43a
--- /dev/null
+++ b/testing/mozharness/test/test_base_parallel.py
@@ -0,0 +1,26 @@
+import unittest
+
+from mozharness.base.parallel import ChunkingMixin
+
+
+class TestChunkingMixin(unittest.TestCase):
+ def setUp(self):
+ self.c = ChunkingMixin()
+
+ def test_one_chunk(self):
+ self.assertEquals(self.c.query_chunked_list([1, 3, 2], 1, 1), [1, 3, 2])
+
+ def test_sorted(self):
+ self.assertEquals(self.c.query_chunked_list([1, 3, 2], 1, 1, sort=True), [1, 2, 3])
+
+ def test_first_chunk(self):
+ self.assertEquals(self.c.query_chunked_list([4, 5, 4, 3], 1, 2), [4, 5])
+
+ def test_last_chunk(self):
+ self.assertEquals(self.c.query_chunked_list([1, 4, 5, 7, 5, 6], 3, 3), [5, 6])
+
+    def test_not_evenly_divisible(self):
+ thing = [1, 3, 6, 4, 3, 2, 6]
+ self.assertEquals(self.c.query_chunked_list(thing, 1, 3), [1, 3, 6])
+ self.assertEquals(self.c.query_chunked_list(thing, 2, 3), [4, 3])
+ self.assertEquals(self.c.query_chunked_list(thing, 3, 3), [2, 6])
diff --git a/testing/mozharness/test/test_base_python.py b/testing/mozharness/test/test_base_python.py
new file mode 100644
index 000000000..c013576f0
--- /dev/null
+++ b/testing/mozharness/test/test_base_python.py
@@ -0,0 +1,37 @@
+import os
+import unittest
+
+import mozharness.base.python as python
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestVirtualenvMixin(unittest.TestCase):
+ def test_package_versions(self):
+ example = os.path.join(here, 'pip-freeze.example.txt')
+        output = open(example).read()
+ mixin = python.VirtualenvMixin()
+ packages = mixin.package_versions(output)
+
+ # from the file
+ expected = {'MakeItSo': '0.2.6',
+ 'PyYAML': '3.10',
+ 'Tempita': '0.5.1',
+ 'WebOb': '1.2b3',
+ 'coverage': '3.5.1',
+ 'logilab-astng': '0.23.1',
+ 'logilab-common': '0.57.1',
+ 'mozdevice': '0.2',
+ 'mozhttpd': '0.3',
+ 'mozinfo': '0.3.3',
+ 'nose': '1.1.2',
+ 'pyflakes': '0.5.0',
+ 'pylint': '0.25.1',
+ 'virtualenv': '1.7.1.2',
+ 'wsgiref': '0.1.2'}
+
+ self.assertEqual(packages, expected)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozharness/test/test_base_script.py b/testing/mozharness/test/test_base_script.py
new file mode 100644
index 000000000..c069a82f3
--- /dev/null
+++ b/testing/mozharness/test/test_base_script.py
@@ -0,0 +1,898 @@
+import gc
+import mock
+import os
+import re
+import shutil
+import tempfile
+import types
+import unittest
+PYWIN32 = False
+if os.name == 'nt':
+ try:
+ import win32file
+ PYWIN32 = True
+    except ImportError:
+ pass
+
+
+import mozharness.base.errors as errors
+import mozharness.base.log as log
+from mozharness.base.log import DEBUG, INFO, WARNING, ERROR, CRITICAL, FATAL, IGNORE
+import mozharness.base.script as script
+from mozharness.base.config import parse_config_file
+
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+test_string = '''foo
+bar
+baz'''
+
+
+class CleanupObj(script.ScriptMixin, log.LogMixin):
+ def __init__(self):
+ super(CleanupObj, self).__init__()
+ self.log_obj = None
+ self.config = {'log_level': ERROR}
+
+
+def cleanup(files=None):
+ files = files or []
+ files.extend(('test_logs', 'test_dir', 'tmpfile_stdout', 'tmpfile_stderr'))
+ gc.collect()
+ c = CleanupObj()
+ for f in files:
+ c.rmtree(f)
+
+
+def get_debug_script_obj():
+ s = script.BaseScript(config={'log_type': 'multi',
+ 'log_level': DEBUG},
+ initial_config_file='test/test.json')
+ return s
+
+
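+# Stand-in _post_fatal hook: writes a marker file so the FATAL test can
+# verify the hook ran before fatal() raised SystemExit (see
+# TestScriptLogging._test_log_level).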
+def _post_fatal(self, **kwargs):
+ fh = open('tmpfile_stdout', 'w')
+ print >>fh, test_string
+ fh.close()
+
+
+# TestScript {{{1
+class TestScript(unittest.TestCase):
+ def setUp(self):
+ cleanup()
+ self.s = None
+ self.tmpdir = tempfile.mkdtemp(suffix='.mozharness')
+
+ def tearDown(self):
+ # Close the logfile handles, or windows can't remove the logs
+ if hasattr(self, 's') and isinstance(self.s, object):
+ del(self.s)
+ cleanup([self.tmpdir])
+
+ # test _dump_config_hierarchy() when --dump-config-hierarchy is passed
+ def test_dump_config_hierarchy_valid_files_len(self):
+ try:
+ self.s = script.BaseScript(
+ initial_config_file='test/test.json',
+ option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
+ config={'dump_config_hierarchy': True}
+ )
+ except SystemExit:
+ local_cfg_files = parse_config_file('test_logs/localconfigfiles.json')
+ # first let's see if the correct number of config files were
+ # realized
+ self.assertEqual(
+ len(local_cfg_files), 4,
+ msg="--dump-config-hierarchy dumped wrong number of config files"
+ )
+
+ def test_dump_config_hierarchy_keys_unique_and_valid(self):
+ try:
+ self.s = script.BaseScript(
+ initial_config_file='test/test.json',
+ option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
+ config={'dump_config_hierarchy': True}
+ )
+ except SystemExit:
+ local_cfg_files = parse_config_file('test_logs/localconfigfiles.json')
+ # now let's see if only unique items were added from each config
+ t_override = local_cfg_files.get('test/test_override.py', {})
+ self.assertTrue(
+ t_override.get('keep_string') == "don't change me" and len(t_override.keys()) == 1,
+ msg="--dump-config-hierarchy dumped wrong keys/value for "
+ "`test/test_override.py`. There should only be one "
+ "item and it should be unique to all the other "
+ "items in test_log/localconfigfiles.json."
+ )
+
+ def test_dump_config_hierarchy_matches_self_config(self):
+ try:
+ ######
+ # we need temp_cfg because self.s will be gcollected (NoneType) by
+ # the time we get to SystemExit exception
+ # temp_cfg will differ from self.s.config because of
+ # 'dump_config_hierarchy'. we have to make a deepcopy because
+ # config is a locked dict
+ temp_s = script.BaseScript(
+ initial_config_file='test/test.json',
+ option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
+ )
+ from copy import deepcopy
+ temp_cfg = deepcopy(temp_s.config)
+ temp_cfg.update({'dump_config_hierarchy': True})
+ ######
+ self.s = script.BaseScript(
+ initial_config_file='test/test.json',
+ option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
+ config={'dump_config_hierarchy': True}
+ )
+ except SystemExit:
+ local_cfg_files = parse_config_file('test_logs/localconfigfiles.json')
+ # finally let's just make sure that all the items added up, equals
+ # what we started with: self.config
+ target_cfg = {}
+ for cfg_file in local_cfg_files:
+ target_cfg.update(local_cfg_files[cfg_file])
+ self.assertEqual(
+ target_cfg, temp_cfg,
+ msg="all of the items (combined) in each cfg file dumped via "
+ "--dump-config-hierarchy does not equal self.config "
+ )
+
+ # test _dump_config() when --dump-config is passed
+ def test_dump_config_equals_self_config(self):
+ try:
+ ######
+ # we need temp_cfg because self.s will be gcollected (NoneType) by
+ # the time we get to SystemExit exception
+ # temp_cfg will differ from self.s.config because of
+ # 'dump_config_hierarchy'. we have to make a deepcopy because
+ # config is a locked dict
+ temp_s = script.BaseScript(
+ initial_config_file='test/test.json',
+ option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
+ )
+ from copy import deepcopy
+ temp_cfg = deepcopy(temp_s.config)
+ temp_cfg.update({'dump_config': True})
+ ######
+ self.s = script.BaseScript(
+ initial_config_file='test/test.json',
+ option_args=['--cfg', 'test/test_override.py,test/test_override2.py'],
+ config={'dump_config': True}
+ )
+ except SystemExit:
+ target_cfg = parse_config_file('test_logs/localconfig.json')
+ self.assertEqual(
+ target_cfg, temp_cfg,
+ msg="all of the items (combined) in each cfg file dumped via "
+ "--dump-config does not equal self.config "
+ )
+
+ def test_nonexistent_mkdir_p(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self.s.mkdir_p('test_dir/foo/bar/baz')
+ self.assertTrue(os.path.isdir('test_dir/foo/bar/baz'),
+ msg="mkdir_p error")
+
+ def test_existing_mkdir_p(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ os.makedirs('test_dir/foo/bar/baz')
+ self.s.mkdir_p('test_dir/foo/bar/baz')
+ self.assertTrue(os.path.isdir('test_dir/foo/bar/baz'),
+ msg="mkdir_p error when dir exists")
+
+ def test_chdir(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ cwd = os.getcwd()
+ self.s.chdir('test_logs')
+ self.assertEqual(os.path.join(cwd, "test_logs"), os.getcwd(),
+ msg="chdir error")
+ self.s.chdir(cwd)
+
+ def _test_log_helper(self, obj):
+ obj.debug("Testing DEBUG")
+ obj.warning("Testing WARNING")
+ obj.error("Testing ERROR")
+ obj.critical("Testing CRITICAL")
+ try:
+ obj.fatal("Testing FATAL")
+ except SystemExit:
+ pass
+ else:
+ self.assertTrue(False, msg="fatal() didn't SystemExit!")
+
+ def test_log(self):
+ self.s = get_debug_script_obj()
+ self.s.log_obj = None
+ self._test_log_helper(self.s)
+ del(self.s)
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self._test_log_helper(self.s)
+
+ def test_run_nonexistent_command(self):
+ self.s = get_debug_script_obj()
+ self.s.run_command(command="this_cmd_should_not_exist --help",
+ env={'GARBLE': 'FARG'},
+ error_list=errors.PythonErrorList)
+ error_logsize = os.path.getsize("test_logs/test_error.log")
+ self.assertTrue(error_logsize > 0,
+ msg="command not found error not hit")
+
+ def test_run_command_in_bad_dir(self):
+ self.s = get_debug_script_obj()
+ self.s.run_command(command="ls",
+ cwd='/this_dir_should_not_exist',
+ error_list=errors.PythonErrorList)
+ error_logsize = os.path.getsize("test_logs/test_error.log")
+ self.assertTrue(error_logsize > 0,
+ msg="bad dir error not hit")
+
+ def test_get_output_from_command_in_bad_dir(self):
+ self.s = get_debug_script_obj()
+ self.s.get_output_from_command(command="ls", cwd='/this_dir_should_not_exist')
+ error_logsize = os.path.getsize("test_logs/test_error.log")
+ self.assertTrue(error_logsize > 0,
+ msg="bad dir error not hit")
+
+ def test_get_output_from_command_with_missing_file(self):
+ self.s = get_debug_script_obj()
+ self.s.get_output_from_command(command="ls /this_file_should_not_exist")
+ error_logsize = os.path.getsize("test_logs/test_error.log")
+ self.assertTrue(error_logsize > 0,
+ msg="bad file error not hit")
+
+ def test_get_output_from_command_with_missing_file2(self):
+ self.s = get_debug_script_obj()
+ self.s.run_command(
+ command="cat mozharness/base/errors.py",
+ error_list=[{
+ 'substr': "error", 'level': ERROR
+ }, {
+ 'regex': re.compile(',$'), 'level': IGNORE,
+ }, {
+ 'substr': ']$', 'level': WARNING,
+ }])
+ error_logsize = os.path.getsize("test_logs/test_error.log")
+ self.assertTrue(error_logsize > 0,
+ msg="error list not working properly")
+
+ def test_download_unpack(self):
+        # NOTE: The action is called *download*, but it also works for files already on disk
+ self.s = get_debug_script_obj()
+
+ archives_path = os.path.join(here, 'helper_files', 'archives')
+
+ # Test basic decompression
+ for archive in ('archive.tar', 'archive.tar.bz2', 'archive.tar.gz', 'archive.zip'):
+ self.s.download_unpack(
+ url=os.path.join(archives_path, archive),
+ extract_to=self.tmpdir
+ )
+ self.assertIn('script.sh', os.listdir(os.path.join(self.tmpdir, 'bin')))
+ self.assertIn('lorem.txt', os.listdir(self.tmpdir))
+ shutil.rmtree(self.tmpdir)
+
+ # Test permissions for extracted entries from zip archive
+ self.s.download_unpack(
+ url=os.path.join(archives_path, 'archive.zip'),
+ extract_to=self.tmpdir,
+ )
+ file_stats = os.stat(os.path.join(self.tmpdir, 'bin', 'script.sh'))
+ orig_fstats = os.stat(os.path.join(archives_path, 'reference', 'bin', 'script.sh'))
+ self.assertEqual(file_stats.st_mode, orig_fstats.st_mode)
+ shutil.rmtree(self.tmpdir)
+
+ # Test unzip specific dirs only
+ self.s.download_unpack(
+ url=os.path.join(archives_path, 'archive.zip'),
+ extract_to=self.tmpdir,
+ extract_dirs=['bin/*']
+ )
+ self.assertIn('bin', os.listdir(self.tmpdir))
+ self.assertNotIn('lorem.txt', os.listdir(self.tmpdir))
+ shutil.rmtree(self.tmpdir)
+
+ # Test for invalid filenames (Windows only)
+ if PYWIN32:
+ with self.assertRaises(IOError):
+ self.s.download_unpack(
+ url=os.path.join(archives_path, 'archive_invalid_filename.zip'),
+ extract_to=self.tmpdir
+ )
+
+ def test_unpack(self):
+ self.s = get_debug_script_obj()
+
+ archives_path = os.path.join(here, 'helper_files', 'archives')
+
+ # Test basic decompression
+ for archive in ('archive.tar', 'archive.tar.bz2', 'archive.tar.gz', 'archive.zip'):
+ self.s.unpack(os.path.join(archives_path, archive), self.tmpdir)
+ self.assertIn('script.sh', os.listdir(os.path.join(self.tmpdir, 'bin')))
+ self.assertIn('lorem.txt', os.listdir(self.tmpdir))
+ shutil.rmtree(self.tmpdir)
+
+ # Test permissions for extracted entries from zip archive
+ self.s.unpack(os.path.join(archives_path, 'archive.zip'), self.tmpdir)
+ file_stats = os.stat(os.path.join(self.tmpdir, 'bin', 'script.sh'))
+ orig_fstats = os.stat(os.path.join(archives_path, 'reference', 'bin', 'script.sh'))
+ self.assertEqual(file_stats.st_mode, orig_fstats.st_mode)
+ shutil.rmtree(self.tmpdir)
+
+ # Test extract specific dirs only
+ self.s.unpack(os.path.join(archives_path, 'archive.zip'), self.tmpdir,
+ extract_dirs=['bin/*'])
+ self.assertIn('bin', os.listdir(self.tmpdir))
+ self.assertNotIn('lorem.txt', os.listdir(self.tmpdir))
+ shutil.rmtree(self.tmpdir)
+
+ # Test for invalid filenames (Windows only)
+ if PYWIN32:
+ with self.assertRaises(IOError):
+ self.s.unpack(os.path.join(archives_path, 'archive_invalid_filename.zip'),
+ self.tmpdir)
+
+
+# TestHelperFunctions {{{1
+class TestHelperFunctions(unittest.TestCase):
+ temp_file = "test_dir/mozilla"
+
+ def setUp(self):
+ cleanup()
+ self.s = None
+
+ def tearDown(self):
+ # Close the logfile handles, or windows can't remove the logs
+ if hasattr(self, 's') and isinstance(self.s, object):
+ del(self.s)
+ cleanup()
+
+ def _create_temp_file(self, contents=test_string):
+ os.mkdir('test_dir')
+ fh = open(self.temp_file, "w+")
+ fh.write(contents)
+        fh.close()
+
+ def test_mkdir_p(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self.s.mkdir_p('test_dir')
+ self.assertTrue(os.path.isdir('test_dir'),
+ msg="mkdir_p error")
+
+ def test_get_output_from_command(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ contents = self.s.get_output_from_command(["bash", "-c", "cat %s" % self.temp_file])
+ self.assertEqual(test_string, contents,
+ msg="get_output_from_command('cat file') differs from fh.write")
+
+ def test_run_command(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ temp_file_name = os.path.basename(self.temp_file)
+ self.assertEqual(self.s.run_command("cat %s" % temp_file_name,
+ cwd="test_dir"), 0,
+ msg="run_command('cat file') did not exit 0")
+
+ def test_move1(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ temp_file2 = '%s2' % self.temp_file
+ self.s.move(self.temp_file, temp_file2)
+ self.assertFalse(os.path.exists(self.temp_file),
+ msg="%s still exists after move()" % self.temp_file)
+
+ def test_move2(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ temp_file2 = '%s2' % self.temp_file
+ self.s.move(self.temp_file, temp_file2)
+ self.assertTrue(os.path.exists(temp_file2),
+ msg="%s doesn't exist after move()" % temp_file2)
+
+ def test_copyfile(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ temp_file2 = '%s2' % self.temp_file
+ self.s.copyfile(self.temp_file, temp_file2)
+ self.assertEqual(os.path.getsize(self.temp_file),
+ os.path.getsize(temp_file2),
+ msg="%s and %s are different sizes after copyfile()" %
+ (self.temp_file, temp_file2))
+
+ def test_existing_rmtree(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self.s.mkdir_p('test_dir/foo/bar/baz')
+ self.s.rmtree('test_dir')
+ self.assertFalse(os.path.exists('test_dir'),
+ msg="rmtree unsuccessful")
+
+ def test_nonexistent_rmtree(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ status = self.s.rmtree('test_dir')
+ self.assertFalse(status, msg="nonexistent rmtree error")
+
+ @unittest.skipUnless(PYWIN32, "PyWin32 specific")
+ def test_long_dir_rmtree(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+        # create a path longer than MAX_PATH, which the command prompt cannot
+        # delete, by using the \\?\ unicode prefix (max path length 32000)
+ path = u'\\\\?\\%s\\test_dir' % os.getcwd()
+ win32file.CreateDirectoryExW(u'.', path)
+
+ for x in range(0, 20):
+ print("path=%s" % path)
+ path = path + u'\\%sxxxxxxxxxxxxxxxxxxxx' % x
+ win32file.CreateDirectoryExW(u'.', path)
+ self.s.rmtree('test_dir')
+ self.assertFalse(os.path.exists('test_dir'),
+ msg="rmtree unsuccessful")
+
+ @unittest.skipUnless(PYWIN32, "PyWin32 specific")
+ def test_chmod_rmtree(self):
+ self._create_temp_file()
+ win32file.SetFileAttributesW(self.temp_file, win32file.FILE_ATTRIBUTE_READONLY)
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self.s.rmtree('test_dir')
+ self.assertFalse(os.path.exists('test_dir'),
+ msg="rmtree unsuccessful")
+
+ @unittest.skipIf(os.name == "nt", "Not for Windows")
+ def test_chmod(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self.s.chmod(self.temp_file, 0100700)
+ self.assertEqual(os.stat(self.temp_file)[0], 33216,
+ msg="chmod unsuccessful")
+
+ def test_env_normal(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ script_env = self.s.query_env()
+ self.assertEqual(script_env, os.environ,
+ msg="query_env() != env\n%s\n%s" % (script_env, os.environ))
+
+ def test_env_normal2(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ self.s.query_env()
+ script_env = self.s.query_env()
+ self.assertEqual(script_env, os.environ,
+ msg="Second query_env() != env\n%s\n%s" % (script_env, os.environ))
+
+ def test_env_partial(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ script_env = self.s.query_env(partial_env={'foo': 'bar'})
+ self.assertTrue('foo' in script_env and script_env['foo'] == 'bar')
+
+ def test_env_path(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ partial_path = "yaddayadda:%(PATH)s"
+ full_path = partial_path % {'PATH': os.environ['PATH']}
+ script_env = self.s.query_env(partial_env={'PATH': partial_path})
+ self.assertEqual(script_env['PATH'], full_path)
+
+ def test_query_exe(self):
+ self.s = script.BaseScript(
+ initial_config_file='test/test.json',
+ config={'exes': {'foo': 'bar'}},
+ )
+ path = self.s.query_exe('foo')
+ self.assertEqual(path, 'bar')
+
+ def test_query_exe_string_replacement(self):
+ self.s = script.BaseScript(
+ initial_config_file='test/test.json',
+ config={
+ 'base_work_dir': 'foo',
+ 'work_dir': 'bar',
+ 'exes': {'foo': os.path.join('%(abs_work_dir)s', 'baz')},
+ },
+ )
+ path = self.s.query_exe('foo')
+ self.assertEqual(path, os.path.join('foo', 'bar', 'baz'))
+
+ def test_read_from_file(self):
+ self._create_temp_file()
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ contents = self.s.read_from_file(self.temp_file)
+ self.assertEqual(contents, test_string)
+
+ def test_read_from_nonexistent_file(self):
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+ contents = self.s.read_from_file("nonexistent_file!!!")
+ self.assertEqual(contents, None)
+
+
+# TestScriptLogging {{{1
+class TestScriptLogging(unittest.TestCase):
+    # TODO: add a log watcher helper function, here and in test_log.
+ def setUp(self):
+ cleanup()
+ self.s = None
+
+ def tearDown(self):
+ # Close the logfile handles, or windows can't remove the logs
+ if hasattr(self, 's') and isinstance(self.s, object):
+ del(self.s)
+ cleanup()
+
+ def test_info_logsize(self):
+ self.s = script.BaseScript(config={'log_type': 'multi'},
+ initial_config_file='test/test.json')
+ info_logsize = os.path.getsize("test_logs/test_info.log")
+ self.assertTrue(info_logsize > 0,
+ msg="initial info logfile missing/size 0")
+
+ def test_add_summary_info(self):
+ self.s = script.BaseScript(config={'log_type': 'multi'},
+ initial_config_file='test/test.json')
+ info_logsize = os.path.getsize("test_logs/test_info.log")
+ self.s.add_summary('one')
+ info_logsize2 = os.path.getsize("test_logs/test_info.log")
+ self.assertTrue(info_logsize < info_logsize2,
+ msg="add_summary() info not logged")
+
+ def test_add_summary_warning(self):
+ self.s = script.BaseScript(config={'log_type': 'multi'},
+ initial_config_file='test/test.json')
+ warning_logsize = os.path.getsize("test_logs/test_warning.log")
+ self.s.add_summary('two', level=WARNING)
+ warning_logsize2 = os.path.getsize("test_logs/test_warning.log")
+ self.assertTrue(warning_logsize < warning_logsize2,
+ msg="add_summary(level=%s) not logged in warning log" % WARNING)
+
+ def test_summary(self):
+ self.s = script.BaseScript(config={'log_type': 'multi'},
+ initial_config_file='test/test.json')
+ self.s.add_summary('one')
+ self.s.add_summary('two', level=WARNING)
+ info_logsize = os.path.getsize("test_logs/test_info.log")
+ warning_logsize = os.path.getsize("test_logs/test_warning.log")
+ self.s.summary()
+ info_logsize2 = os.path.getsize("test_logs/test_info.log")
+ warning_logsize2 = os.path.getsize("test_logs/test_warning.log")
+ msg = ""
+ if info_logsize >= info_logsize2:
+ msg += "summary() didn't log to info!\n"
+ if warning_logsize >= warning_logsize2:
+ msg += "summary() didn't log to warning!\n"
+ self.assertEqual(msg, "", msg=msg)
+
+ def _test_log_level(self, log_level, log_level_file_list):
+ self.s = script.BaseScript(config={'log_type': 'multi'},
+ initial_config_file='test/test.json')
+ if log_level != FATAL:
+ self.s.log('testing', level=log_level)
+ else:
+ self.s._post_fatal = types.MethodType(_post_fatal, self.s)
+ try:
+ self.s.fatal('testing')
+ except SystemExit:
+ contents = None
+ if os.path.exists('tmpfile_stdout'):
+ fh = open('tmpfile_stdout')
+ contents = fh.read()
+ fh.close()
+ self.assertEqual(contents.rstrip(), test_string, "_post_fatal failed!")
+ del(self.s)
+ msg = ""
+ for level in log_level_file_list:
+ log_path = "test_logs/test_%s.log" % level
+ if not os.path.exists(log_path):
+ msg += "%s doesn't exist!\n" % log_path
+ else:
+ filesize = os.path.getsize(log_path)
+ if not filesize > 0:
+ msg += "%s is size 0!\n" % log_path
+ self.assertEqual(msg, "", msg=msg)
+
+ def test_debug(self):
+ self._test_log_level(DEBUG, [])
+
+ def test_ignore(self):
+ self._test_log_level(IGNORE, [])
+
+ def test_info(self):
+ self._test_log_level(INFO, [INFO])
+
+ def test_warning(self):
+ self._test_log_level(WARNING, [INFO, WARNING])
+
+ def test_error(self):
+ self._test_log_level(ERROR, [INFO, WARNING, ERROR])
+
+ def test_critical(self):
+ self._test_log_level(CRITICAL, [INFO, WARNING, ERROR, CRITICAL])
+
+ def test_fatal(self):
+ self._test_log_level(FATAL, [INFO, WARNING, ERROR, CRITICAL, FATAL])
+
+
+# TestRetry {{{1
+class NewError(Exception):
+ pass
+
+
+class OtherError(Exception):
+ pass
+
+
+class TestRetry(unittest.TestCase):
+ def setUp(self):
+ self.ATTEMPT_N = 1
+ self.s = script.BaseScript(initial_config_file='test/test.json')
+
+ def tearDown(self):
+ # Close the logfile handles, or windows can't remove the logs
+ if hasattr(self, 's') and isinstance(self.s, object):
+ del(self.s)
+ cleanup()
+
+ def _succeedOnSecondAttempt(self, foo=None, exception=Exception):
+ if self.ATTEMPT_N == 2:
+ self.ATTEMPT_N += 1
+ return
+ self.ATTEMPT_N += 1
+ raise exception("Fail")
+
+ def _raiseCustomException(self):
+ return self._succeedOnSecondAttempt(exception=NewError)
+
+ def _alwaysPass(self):
+ self.ATTEMPT_N += 1
+ return True
+
+ def _mirrorArgs(self, *args, **kwargs):
+ return args, kwargs
+
+ def _alwaysFail(self):
+ raise Exception("Fail")
+
+ def testRetrySucceed(self):
+ # Will raise if anything goes wrong
+ self.s.retry(self._succeedOnSecondAttempt, attempts=2, sleeptime=0)
+
+ def testRetryFailWithoutCatching(self):
+ self.assertRaises(Exception, self.s.retry, self._alwaysFail, sleeptime=0,
+ exceptions=())
+
+ def testRetryFailEnsureRaisesLastException(self):
+ self.assertRaises(SystemExit, self.s.retry, self._alwaysFail, sleeptime=0,
+ error_level=FATAL)
+
+ def testRetrySelectiveExceptionSucceed(self):
+ self.s.retry(self._raiseCustomException, attempts=2, sleeptime=0,
+ retry_exceptions=(NewError,))
+
+ def testRetrySelectiveExceptionFail(self):
+ self.assertRaises(NewError, self.s.retry, self._raiseCustomException, attempts=2,
+ sleeptime=0, retry_exceptions=(OtherError,))
+
+ # TODO: figure out a way to test that the sleep actually happened
+ def testRetryWithSleep(self):
+ self.s.retry(self._succeedOnSecondAttempt, attempts=2, sleeptime=1)
+
+ def testRetryOnlyRunOnce(self):
+ """Tests that retry() doesn't call the action again after success"""
+ self.s.retry(self._alwaysPass, attempts=3, sleeptime=0)
+ # self.ATTEMPT_N gets increased regardless of pass/fail
+ self.assertEquals(2, self.ATTEMPT_N)
+
+ def testRetryReturns(self):
+ ret = self.s.retry(self._alwaysPass, sleeptime=0)
+ self.assertEquals(ret, True)
+
+ def testRetryCleanupIsCalled(self):
+ cleanup = mock.Mock()
+ self.s.retry(self._succeedOnSecondAttempt, cleanup=cleanup, sleeptime=0)
+ self.assertEquals(cleanup.call_count, 1)
+
+ def testRetryArgsPassed(self):
+ args = (1, 'two', 3)
+ kwargs = dict(foo='a', bar=7)
+ ret = self.s.retry(self._mirrorArgs, args=args, kwargs=kwargs.copy(), sleeptime=0)
+ print ret
+ self.assertEqual(ret[0], args)
+ self.assertEqual(ret[1], kwargs)
+
+
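+# Records every Pre/PostScriptRun and Pre/PostScriptAction callback it
+# receives, and can raise at configurable points to exercise error handling.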
+class BaseScriptWithDecorators(script.BaseScript):
+ def __init__(self, *args, **kwargs):
+ super(BaseScriptWithDecorators, self).__init__(*args, **kwargs)
+
+ self.pre_run_1_args = []
+ self.raise_during_pre_run_1 = False
+ self.pre_action_1_args = []
+ self.raise_during_pre_action_1 = False
+ self.pre_action_2_args = []
+ self.pre_action_3_args = []
+ self.post_action_1_args = []
+ self.raise_during_post_action_1 = False
+ self.post_action_2_args = []
+ self.post_action_3_args = []
+ self.post_run_1_args = []
+ self.raise_during_post_run_1 = False
+ self.post_run_2_args = []
+ self.raise_during_build = False
+
+ @script.PreScriptRun
+ def pre_run_1(self, *args, **kwargs):
+ self.pre_run_1_args.append((args, kwargs))
+
+ if self.raise_during_pre_run_1:
+ raise Exception(self.raise_during_pre_run_1)
+
+ @script.PreScriptAction
+ def pre_action_1(self, *args, **kwargs):
+ self.pre_action_1_args.append((args, kwargs))
+
+ if self.raise_during_pre_action_1:
+ raise Exception(self.raise_during_pre_action_1)
+
+ @script.PreScriptAction
+ def pre_action_2(self, *args, **kwargs):
+ self.pre_action_2_args.append((args, kwargs))
+
+ @script.PreScriptAction('clobber')
+ def pre_action_3(self, *args, **kwargs):
+ self.pre_action_3_args.append((args, kwargs))
+
+ @script.PostScriptAction
+ def post_action_1(self, *args, **kwargs):
+ self.post_action_1_args.append((args, kwargs))
+
+ if self.raise_during_post_action_1:
+ raise Exception(self.raise_during_post_action_1)
+
+ @script.PostScriptAction
+ def post_action_2(self, *args, **kwargs):
+ self.post_action_2_args.append((args, kwargs))
+
+ @script.PostScriptAction('build')
+ def post_action_3(self, *args, **kwargs):
+ self.post_action_3_args.append((args, kwargs))
+
+ @script.PostScriptRun
+ def post_run_1(self, *args, **kwargs):
+ self.post_run_1_args.append((args, kwargs))
+
+ if self.raise_during_post_run_1:
+ raise Exception(self.raise_during_post_run_1)
+
+ @script.PostScriptRun
+ def post_run_2(self, *args, **kwargs):
+ self.post_run_2_args.append((args, kwargs))
+
+ def build(self):
+ if self.raise_during_build:
+ raise Exception(self.raise_during_build)
+
+
+class TestScriptDecorators(unittest.TestCase):
+ def setUp(self):
+ cleanup()
+ self.s = None
+
+ def tearDown(self):
+ if hasattr(self, 's') and isinstance(self.s, object):
+ del self.s
+
+ cleanup()
+
+ def test_decorators_registered(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+
+ self.assertEqual(len(self.s._listeners['pre_run']), 1)
+ self.assertEqual(len(self.s._listeners['pre_action']), 3)
+ self.assertEqual(len(self.s._listeners['post_action']), 3)
+ self.assertEqual(len(self.s._listeners['post_run']), 3)
+
+ def test_pre_post_fired(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+ self.s.run()
+
+ self.assertEqual(len(self.s.pre_run_1_args), 1)
+ self.assertEqual(len(self.s.pre_action_1_args), 2)
+ self.assertEqual(len(self.s.pre_action_2_args), 2)
+ self.assertEqual(len(self.s.pre_action_3_args), 1)
+ self.assertEqual(len(self.s.post_action_1_args), 2)
+ self.assertEqual(len(self.s.post_action_2_args), 2)
+ self.assertEqual(len(self.s.post_action_3_args), 1)
+ self.assertEqual(len(self.s.post_run_1_args), 1)
+
+ self.assertEqual(self.s.pre_run_1_args[0], ((), {}))
+
+ self.assertEqual(self.s.pre_action_1_args[0], (('clobber',), {}))
+ self.assertEqual(self.s.pre_action_1_args[1], (('build',), {}))
+
+ # pre_action_3 should only get called for the action it is registered
+ # with.
+ self.assertEqual(self.s.pre_action_3_args[0], (('clobber',), {}))
+
+ self.assertEqual(self.s.post_action_1_args[0][0], ('clobber',))
+ self.assertEqual(self.s.post_action_1_args[0][1], dict(success=True))
+ self.assertEqual(self.s.post_action_1_args[1][0], ('build',))
+ self.assertEqual(self.s.post_action_1_args[1][1], dict(success=True))
+
+ # post_action_3 should only get called for the action it is registered
+ # with.
+ self.assertEqual(self.s.post_action_3_args[0], (('build',),
+ dict(success=True)))
+
+ self.assertEqual(self.s.post_run_1_args[0], ((), {}))
+
+ def test_post_always_fired(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+ self.s.raise_during_build = 'Testing post always fired.'
+
+ with self.assertRaises(SystemExit):
+ self.s.run()
+
+ self.assertEqual(len(self.s.pre_run_1_args), 1)
+ self.assertEqual(len(self.s.pre_action_1_args), 2)
+ self.assertEqual(len(self.s.post_action_1_args), 2)
+ self.assertEqual(len(self.s.post_action_2_args), 2)
+ self.assertEqual(len(self.s.post_run_1_args), 1)
+ self.assertEqual(len(self.s.post_run_2_args), 1)
+
+ self.assertEqual(self.s.post_action_1_args[0][1], dict(success=True))
+ self.assertEqual(self.s.post_action_1_args[1][1], dict(success=False))
+ self.assertEqual(self.s.post_action_2_args[1][1], dict(success=False))
+
+ def test_pre_run_exception(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+ self.s.raise_during_pre_run_1 = 'Error during pre run 1'
+
+ with self.assertRaises(SystemExit):
+ self.s.run()
+
+ self.assertEqual(len(self.s.pre_run_1_args), 1)
+ self.assertEqual(len(self.s.pre_action_1_args), 0)
+ self.assertEqual(len(self.s.post_run_1_args), 1)
+ self.assertEqual(len(self.s.post_run_2_args), 1)
+
+ def test_pre_action_exception(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+ self.s.raise_during_pre_action_1 = 'Error during pre 1'
+
+ with self.assertRaises(SystemExit):
+ self.s.run()
+
+ self.assertEqual(len(self.s.pre_run_1_args), 1)
+ self.assertEqual(len(self.s.pre_action_1_args), 1)
+ self.assertEqual(len(self.s.pre_action_2_args), 0)
+ self.assertEqual(len(self.s.post_action_1_args), 1)
+ self.assertEqual(len(self.s.post_action_2_args), 1)
+ self.assertEqual(len(self.s.post_run_1_args), 1)
+ self.assertEqual(len(self.s.post_run_2_args), 1)
+
+ def test_post_action_exception(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+ self.s.raise_during_post_action_1 = 'Error during post 1'
+
+ with self.assertRaises(SystemExit):
+ self.s.run()
+
+ self.assertEqual(len(self.s.pre_run_1_args), 1)
+ self.assertEqual(len(self.s.post_action_1_args), 1)
+ self.assertEqual(len(self.s.post_action_2_args), 1)
+ self.assertEqual(len(self.s.post_run_1_args), 1)
+ self.assertEqual(len(self.s.post_run_2_args), 1)
+
+ def test_post_run_exception(self):
+ self.s = BaseScriptWithDecorators(initial_config_file='test/test.json')
+ self.s.raise_during_post_run_1 = 'Error during post run 1'
+
+ with self.assertRaises(SystemExit):
+ self.s.run()
+
+ self.assertEqual(len(self.s.post_run_1_args), 1)
+ self.assertEqual(len(self.s.post_run_2_args), 1)
+
+
+# main {{{1
+if __name__ == '__main__':
+ unittest.main()
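
For readers skimming the patch, here is a minimal sketch (not part of the patch) of how the hooks exercised above are used in practice. It assumes mozharness is importable and invents an ExampleScript with two actions; the hook signatures mirror what the tests assert: pre/post action hooks receive the action name, post-action hooks also receive success=..., and run-level hooks receive no arguments.

    import mozharness.base.script as script

    class ExampleScript(script.BaseScript):
        def __init__(self):
            super(ExampleScript, self).__init__(
                all_actions=['clobber', 'build'],
                default_actions=['clobber', 'build'],
                config={'log_type': 'simple'},
            )

        @script.PreScriptAction('build')
        def _before_build(self, action):
            # Fires only before the 'build' action.
            self.info('about to run %s' % action)

        @script.PostScriptAction
        def _after_any_action(self, action, success=None):
            # Fires after every action; success=False if the action raised.
            self.info('%s finished, success=%s' % (action, success))

        @script.PostScriptRun
        def _teardown(self):
            # Always fires at the end of run(), even after a failure.
            self.info('run complete')

        def clobber(self):
            pass

        def build(self):
            pass
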
diff --git a/testing/mozharness/test/test_base_transfer.py b/testing/mozharness/test/test_base_transfer.py
new file mode 100644
index 000000000..f3f907254
--- /dev/null
+++ b/testing/mozharness/test/test_base_transfer.py
@@ -0,0 +1,127 @@
+import unittest
+import mock
+
+from mozharness.base.transfer import TransferMixin
+
+
+class GoodMockMixin(object):
+ def query_abs_dirs(self):
+ return {'abs_work_dir': ''}
+
+ def query_exe(self, exe):
+ return exe
+
+ def info(self, msg):
+ pass
+
+ def log(self, msg, level):
+ pass
+
+    def run_command(self, *args, **kwargs):
+        return 0
+
+
+class BadMockMixin(GoodMockMixin):
+    def run_command(self, *args, **kwargs):
+        return 1
+
+
+class GoodTransferMixin(TransferMixin, GoodMockMixin):
+ pass
+
+
+class BadTransferMixin(TransferMixin, BadMockMixin):
+ pass
+
+
+class TestTransferMixin(unittest.TestCase):
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_upload_dir_not_a_dir(self, os_mock):
+        # simulates an upload where the local path is not a directory
+ os_mock.path.isdir.return_value = False
+ tm = GoodTransferMixin()
+ self.assertEqual(tm.rsync_upload_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',), -1)
+
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_upload_fails_create_remote_dir(self, os_mock):
+ # we cannot create the remote directory
+ os_mock.path.isdir.return_value = True
+ tm = BadTransferMixin()
+ self.assertEqual(tm.rsync_upload_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',
+ create_remote_directory=True), -2)
+
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_upload_fails_do_not_create_remote_dir(self, os_mock):
+ # upload fails, remote directory is not created
+ os_mock.path.isdir.return_value = True
+ tm = BadTransferMixin()
+ self.assertEqual(tm.rsync_upload_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',
+ create_remote_directory=False), -3)
+
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_upload(self, os_mock):
+ # simulates an upload with no errors
+ os_mock.path.isdir.return_value = True
+ tm = GoodTransferMixin()
+ self.assertEqual(tm.rsync_upload_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',
+ create_remote_directory=False), None)
+
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_download_in_not_a_dir(self, os_mock):
+ # local path is not a directory
+ os_mock.path.isdir.return_value = False
+ tm = GoodTransferMixin()
+ self.assertEqual(tm.rsync_download_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',), -1)
+
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_download(self, os_mock):
+ # successful rsync
+ os_mock.path.isdir.return_value = True
+ tm = GoodTransferMixin()
+ self.assertEqual(tm.rsync_download_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',), None)
+
+ @mock.patch('mozharness.base.transfer.os')
+ def test_rsync_download_fail(self, os_mock):
+        # simulates a failed download
+ os_mock.path.isdir.return_value = True
+ tm = BadTransferMixin()
+ self.assertEqual(tm.rsync_download_directory(
+ local_path='',
+ ssh_key='my ssh key',
+ ssh_user='my ssh user',
+ remote_host='remote host',
+ remote_path='remote path',), -3)
+
+
+if __name__ == '__main__':
+ unittest.main()
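
The assertions above pin down a small return-code contract for TransferMixin: None on success, -1 when the local path is not a directory, -2 when creating the remote directory fails, and -3 when the rsync itself fails. A hedged helper for turning those codes into messages, illustrative only and not part of the mixin:

    RSYNC_STATUS = {
        None: 'success',
        -1: 'local path is not a directory',
        -2: 'could not create the remote directory',
        -3: 'rsync command failed',
    }

    def describe_rsync_status(status):
        # Unknown codes fall back to a generic message.
        return RSYNC_STATUS.get(status, 'unknown rsync status: %r' % status)

For example, describe_rsync_status(tm.rsync_upload_directory(...)) after a transfer.
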
diff --git a/testing/mozharness/test/test_base_vcs_mercurial.py b/testing/mozharness/test/test_base_vcs_mercurial.py
new file mode 100644
index 000000000..1463d8963
--- /dev/null
+++ b/testing/mozharness/test/test_base_vcs_mercurial.py
@@ -0,0 +1,440 @@
+import os
+import platform
+import shutil
+import tempfile
+import unittest
+
+import mozharness.base.errors as errors
+import mozharness.base.vcs.mercurial as mercurial
+
+test_string = '''foo
+bar
+baz'''
+
+HG = ['hg'] + mercurial.HG_OPTIONS
+
+# Known default .hgrc
+os.environ['HGRCPATH'] = os.path.abspath(os.path.join(os.path.dirname(__file__), 'helper_files', '.hgrc'))
+
+
+def cleanup():
+ if os.path.exists('test_logs'):
+ shutil.rmtree('test_logs')
+ if os.path.exists('test_dir'):
+ if os.path.isdir('test_dir'):
+ shutil.rmtree('test_dir')
+ else:
+ os.remove('test_dir')
+ for filename in ('localconfig.json', 'localconfig.json.bak'):
+ if os.path.exists(filename):
+ os.remove(filename)
+
+
+def get_mercurial_vcs_obj():
+ m = mercurial.MercurialVCS()
+ m.config = {}
+ return m
+
+
+def get_revisions(dest):
+ m = get_mercurial_vcs_obj()
+ retval = []
+ for rev in m.get_output_from_command(HG + ['log', '-R', dest, '--template', '{node}\n']).split('\n'):
+ rev = rev.strip()
+ if not rev:
+ continue
+ retval.append(rev)
+ return retval
+
+
+class TestMakeAbsolute(unittest.TestCase):
+ # _make_absolute() doesn't play nicely with windows/msys paths.
+ # TODO: fix _make_absolute, write it out of the picture, or determine
+ # that it's not needed on windows.
+ if platform.system() not in ("Windows",):
+ def test_absolute_path(self):
+ m = get_mercurial_vcs_obj()
+ self.assertEquals(m._make_absolute("/foo/bar"), "/foo/bar")
+
+ def test_relative_path(self):
+ m = get_mercurial_vcs_obj()
+ self.assertEquals(m._make_absolute("foo/bar"), os.path.abspath("foo/bar"))
+
+ def test_HTTP_paths(self):
+ m = get_mercurial_vcs_obj()
+ self.assertEquals(m._make_absolute("http://foo/bar"), "http://foo/bar")
+
+ def test_absolute_file_path(self):
+ m = get_mercurial_vcs_obj()
+ self.assertEquals(m._make_absolute("file:///foo/bar"), "file:///foo/bar")
+
+ def test_relative_file_path(self):
+ m = get_mercurial_vcs_obj()
+ self.assertEquals(m._make_absolute("file://foo/bar"), "file://%s/foo/bar" % os.getcwd())
+
+
+class TestHg(unittest.TestCase):
+ def _init_hg_repo(self, hg_obj, repodir):
+ hg_obj.run_command(["bash",
+ os.path.join(os.path.dirname(__file__),
+ "helper_files", "init_hgrepo.sh"),
+ repodir])
+
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp()
+ self.repodir = os.path.join(self.tmpdir, 'repo')
+ m = get_mercurial_vcs_obj()
+ self._init_hg_repo(m, self.repodir)
+ self.revisions = get_revisions(self.repodir)
+ self.wc = os.path.join(self.tmpdir, 'wc')
+ self.pwd = os.getcwd()
+
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir)
+ os.chdir(self.pwd)
+
+ def test_get_branch(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+ b = m.get_branch_from_path(self.wc)
+ self.assertEquals(b, 'default')
+
+ def test_get_branches(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+ branches = m.get_branches_from_path(self.wc)
+ self.assertEquals(sorted(branches), sorted(["branch2", "default"]))
+
+ def test_clone(self):
+ m = get_mercurial_vcs_obj()
+ rev = m.clone(self.repodir, self.wc, update_dest=False)
+ self.assertEquals(rev, None)
+ self.assertEquals(self.revisions, get_revisions(self.wc))
+ self.assertEquals(sorted(os.listdir(self.wc)), ['.hg'])
+
+ def test_clone_into_non_empty_dir(self):
+ m = get_mercurial_vcs_obj()
+ m.mkdir_p(self.wc)
+ open(os.path.join(self.wc, 'test.txt'), 'w').write('hello')
+ m.clone(self.repodir, self.wc, update_dest=False)
+ self.failUnless(not os.path.exists(os.path.join(self.wc, 'test.txt')))
+
+ def test_clone_update(self):
+ m = get_mercurial_vcs_obj()
+ rev = m.clone(self.repodir, self.wc, update_dest=True)
+ self.assertEquals(rev, self.revisions[0])
+
+ def test_clone_branch(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc, branch='branch2',
+ update_dest=False)
+ # On hg 1.6, we should only have a subset of the revisions
+ if m.hg_ver() >= (1, 6, 0):
+ self.assertEquals(self.revisions[1:],
+ get_revisions(self.wc))
+ else:
+ self.assertEquals(self.revisions,
+ get_revisions(self.wc))
+
+ def test_clone_update_branch(self):
+ m = get_mercurial_vcs_obj()
+ rev = m.clone(self.repodir, os.path.join(self.tmpdir, 'wc'),
+ branch="branch2", update_dest=True)
+ self.assertEquals(rev, self.revisions[1], self.revisions)
+
+ def test_clone_revision(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc,
+ revision=self.revisions[0], update_dest=False)
+ # We'll only get a subset of the revisions
+ self.assertEquals(self.revisions[:1] + self.revisions[2:],
+ get_revisions(self.wc))
+
+ def test_update_revision(self):
+ m = get_mercurial_vcs_obj()
+ rev = m.clone(self.repodir, self.wc, update_dest=False)
+ self.assertEquals(rev, None)
+
+ rev = m.update(self.wc, revision=self.revisions[1])
+ self.assertEquals(rev, self.revisions[1])
+
+ def test_pull(self):
+ m = get_mercurial_vcs_obj()
+ # Clone just the first rev
+ m.clone(self.repodir, self.wc, revision=self.revisions[-1], update_dest=False)
+ self.assertEquals(get_revisions(self.wc), self.revisions[-1:])
+
+ # Now pull in new changes
+ rev = m.pull(self.repodir, self.wc, update_dest=False)
+ self.assertEquals(rev, None)
+ self.assertEquals(get_revisions(self.wc), self.revisions)
+
+ def test_pull_revision(self):
+ m = get_mercurial_vcs_obj()
+ # Clone just the first rev
+ m.clone(self.repodir, self.wc, revision=self.revisions[-1], update_dest=False)
+ self.assertEquals(get_revisions(self.wc), self.revisions[-1:])
+
+ # Now pull in just the last revision
+ rev = m.pull(self.repodir, self.wc, revision=self.revisions[0], update_dest=False)
+ self.assertEquals(rev, None)
+
+ # We'll be missing the middle revision (on another branch)
+ self.assertEquals(get_revisions(self.wc), self.revisions[:1] + self.revisions[2:])
+
+ def test_pull_branch(self):
+ m = get_mercurial_vcs_obj()
+ # Clone just the first rev
+ m.clone(self.repodir, self.wc, revision=self.revisions[-1], update_dest=False)
+ self.assertEquals(get_revisions(self.wc), self.revisions[-1:])
+
+ # Now pull in the other branch
+ rev = m.pull(self.repodir, self.wc, branch="branch2", update_dest=False)
+ self.assertEquals(rev, None)
+
+ # On hg 1.6, we'll be missing the last revision (on another branch)
+ if m.hg_ver() >= (1, 6, 0):
+ self.assertEquals(get_revisions(self.wc), self.revisions[1:])
+ else:
+ self.assertEquals(get_revisions(self.wc), self.revisions)
+
+ def test_pull_unrelated(self):
+ m = get_mercurial_vcs_obj()
+ # Create a new repo
+ repo2 = os.path.join(self.tmpdir, 'repo2')
+ self._init_hg_repo(m, repo2)
+
+ self.assertNotEqual(self.revisions, get_revisions(repo2))
+
+ # Clone the original repo
+ m.clone(self.repodir, self.wc, update_dest=False)
+ # Hide the wanted error
+ m.config = {'log_to_console': False}
+ # Try and pull in changes from the new repo
+ self.assertRaises(mercurial.VCSException, m.pull, repo2, self.wc, update_dest=False)
+
+ def test_push(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc, revision=self.revisions[-2])
+ m.push(src=self.repodir, remote=self.wc)
+ self.assertEquals(get_revisions(self.wc), self.revisions)
+
+ def test_push_with_branch(self):
+ m = get_mercurial_vcs_obj()
+ if m.hg_ver() >= (1, 6, 0):
+ m.clone(self.repodir, self.wc, revision=self.revisions[-1])
+ m.push(src=self.repodir, remote=self.wc, branch='branch2')
+ m.push(src=self.repodir, remote=self.wc, branch='default')
+ self.assertEquals(get_revisions(self.wc), self.revisions)
+
+ def test_push_with_revision(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc, revision=self.revisions[-2])
+ m.push(src=self.repodir, remote=self.wc, revision=self.revisions[-1])
+ self.assertEquals(get_revisions(self.wc), self.revisions[-2:])
+
+ def test_mercurial(self):
+ m = get_mercurial_vcs_obj()
+ m.vcs_config = {
+ 'repo': self.repodir,
+ 'dest': self.wc,
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ m.ensure_repo_and_revision()
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[0])
+
+ def test_push_new_branches_not_allowed(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc, revision=self.revisions[0])
+ # Hide the wanted error
+ m.config = {'log_to_console': False}
+ self.assertRaises(Exception, m.push, self.repodir, self.wc, push_new_branches=False)
+
+ def test_mercurial_relative_dir(self):
+ m = get_mercurial_vcs_obj()
+ repo = os.path.basename(self.repodir)
+ wc = os.path.basename(self.wc)
+ m.vcs_config = {
+ 'repo': repo,
+ 'dest': wc,
+ 'revision': self.revisions[-1],
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ m.chdir(os.path.dirname(self.repodir))
+ try:
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[-1])
+ m.info("Creating test.txt")
+ open(os.path.join(self.wc, 'test.txt'), 'w').write("hello!")
+
+ m = get_mercurial_vcs_obj()
+ m.vcs_config = {
+ 'repo': repo,
+ 'dest': wc,
+ 'revision': self.revisions[0],
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[0])
+ # Make sure our local file didn't go away
+ self.failUnless(os.path.exists(os.path.join(self.wc, 'test.txt')))
+ finally:
+ m.chdir(self.pwd)
+
+ def test_mercurial_update_tip(self):
+ m = get_mercurial_vcs_obj()
+ m.vcs_config = {
+ 'repo': self.repodir,
+ 'dest': self.wc,
+ 'revision': self.revisions[-1],
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[-1])
+ open(os.path.join(self.wc, 'test.txt'), 'w').write("hello!")
+
+ m = get_mercurial_vcs_obj()
+ m.vcs_config = {
+ 'repo': self.repodir,
+ 'dest': self.wc,
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[0])
+ # Make sure our local file didn't go away
+ self.failUnless(os.path.exists(os.path.join(self.wc, 'test.txt')))
+
+ def test_mercurial_update_rev(self):
+ m = get_mercurial_vcs_obj()
+ m.vcs_config = {
+ 'repo': self.repodir,
+ 'dest': self.wc,
+ 'revision': self.revisions[-1],
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[-1])
+ open(os.path.join(self.wc, 'test.txt'), 'w').write("hello!")
+
+ m = get_mercurial_vcs_obj()
+ m.vcs_config = {
+ 'repo': self.repodir,
+ 'dest': self.wc,
+ 'revision': self.revisions[0],
+ 'vcs_share_base': os.path.join(self.tmpdir, 'share'),
+ }
+ rev = m.ensure_repo_and_revision()
+ self.assertEquals(rev, self.revisions[0])
+ # Make sure our local file didn't go away
+ self.failUnless(os.path.exists(os.path.join(self.wc, 'test.txt')))
+
+ def test_make_hg_url(self):
+        # construct an hg url specific to a revision, branch, and filename, and try to pull it down
+ file_url = mercurial.make_hg_url(
+ "hg.mozilla.org",
+ '//build/tools/',
+ revision='FIREFOX_3_6_12_RELEASE',
+ filename="/lib/python/util/hg.py",
+ protocol='https',
+ )
+ expected_url = "https://hg.mozilla.org/build/tools/raw-file/FIREFOX_3_6_12_RELEASE/lib/python/util/hg.py"
+ self.assertEquals(file_url, expected_url)
+
+ def test_make_hg_url_no_filename(self):
+ file_url = mercurial.make_hg_url(
+ "hg.mozilla.org",
+ "/build/tools",
+ revision="default",
+ protocol='https',
+ )
+ expected_url = "https://hg.mozilla.org/build/tools/rev/default"
+ self.assertEquals(file_url, expected_url)
+
+ def test_make_hg_url_no_revision_no_filename(self):
+ repo_url = mercurial.make_hg_url(
+ "hg.mozilla.org",
+ "/build/tools",
+ protocol='https',
+ )
+ expected_url = "https://hg.mozilla.org/build/tools"
+ self.assertEquals(repo_url, expected_url)
+
+ def test_make_hg_url_different_protocol(self):
+ repo_url = mercurial.make_hg_url(
+ "hg.mozilla.org",
+ "/build/tools",
+ protocol='ssh',
+ )
+ expected_url = "ssh://hg.mozilla.org/build/tools"
+ self.assertEquals(repo_url, expected_url)
+
+ def test_apply_and_push(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+
+ def c(repo, attempt):
+ m.run_command(HG + ['tag', '-f', 'TEST'], cwd=repo)
+ m.apply_and_push(self.wc, self.repodir, c)
+ self.assertEquals(get_revisions(self.wc), get_revisions(self.repodir))
+
+ def test_apply_and_push_fail(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+
+ def c(repo, attempt, remote):
+ m.run_command(HG + ['tag', '-f', 'TEST'], cwd=repo)
+ m.run_command(HG + ['tag', '-f', 'CONFLICTING_TAG'], cwd=remote)
+ m.config = {'log_to_console': False}
+ self.assertRaises(errors.VCSException, m.apply_and_push, self.wc,
+ self.repodir, lambda r, a: c(r, a, self.repodir),
+ max_attempts=2)
+
+ def test_apply_and_push_with_rebase(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+ m.config = {'log_to_console': False}
+
+ def c(repo, attempt, remote):
+ m.run_command(HG + ['tag', '-f', 'TEST'], cwd=repo)
+ if attempt == 1:
+ m.run_command(HG + ['rm', 'hello.txt'], cwd=remote)
+ m.run_command(HG + ['commit', '-m', 'test'], cwd=remote)
+ m.apply_and_push(self.wc, self.repodir,
+ lambda r, a: c(r, a, self.repodir), max_attempts=2)
+ self.assertEquals(get_revisions(self.wc), get_revisions(self.repodir))
+
+ def test_apply_and_push_rebase_fails(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+ m.config = {'log_to_console': False}
+
+ def c(repo, attempt, remote):
+ m.run_command(HG + ['tag', '-f', 'TEST'], cwd=repo)
+ if attempt in (1, 2):
+ m.run_command(HG + ['tag', '-f', 'CONFLICTING_TAG'], cwd=remote)
+ m.apply_and_push(self.wc, self.repodir,
+ lambda r, a: c(r, a, self.repodir), max_attempts=4)
+ self.assertEquals(get_revisions(self.wc), get_revisions(self.repodir))
+
+ def test_apply_and_push_on_branch(self):
+ m = get_mercurial_vcs_obj()
+ if m.hg_ver() >= (1, 6, 0):
+ m.clone(self.repodir, self.wc)
+
+ def c(repo, attempt):
+ m.run_command(HG + ['branch', 'branch3'], cwd=repo)
+ m.run_command(HG + ['tag', '-f', 'TEST'], cwd=repo)
+ m.apply_and_push(self.wc, self.repodir, c)
+ self.assertEquals(get_revisions(self.wc), get_revisions(self.repodir))
+
+ def test_apply_and_push_with_no_change(self):
+ m = get_mercurial_vcs_obj()
+ m.clone(self.repodir, self.wc)
+
+ def c(r, a):
+ pass
+ self.assertRaises(errors.VCSException, m.apply_and_push, self.wc, self.repodir, c)
+
+if __name__ == '__main__':
+ unittest.main()
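
For reference, a usage sketch of make_hg_url built from the same arguments the tests above pass; the expected string comes straight from test_make_hg_url_no_filename.

    import mozharness.base.vcs.mercurial as mercurial

    url = mercurial.make_hg_url(
        'hg.mozilla.org',
        '/build/tools',
        revision='default',
        protocol='https',
    )
    assert url == 'https://hg.mozilla.org/build/tools/rev/default'

    # Adding filename=... switches the URL to the raw file for that revision,
    # as test_make_hg_url shows.
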
diff --git a/testing/mozharness/test/test_l10n_locales.py b/testing/mozharness/test/test_l10n_locales.py
new file mode 100644
index 000000000..e8372a9fb
--- /dev/null
+++ b/testing/mozharness/test/test_l10n_locales.py
@@ -0,0 +1,132 @@
+import os
+import shutil
+import subprocess
+import sys
+import unittest
+
+import mozharness.base.log as log
+import mozharness.base.script as script
+import mozharness.mozilla.l10n.locales as locales
+
+ALL_LOCALES = ['ar', 'be', 'de', 'es-ES']
+
+MH_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+def cleanup():
+ if os.path.exists('test_logs'):
+ shutil.rmtree('test_logs')
+
+class LocalesTest(locales.LocalesMixin, script.BaseScript):
+ def __init__(self, **kwargs):
+ if 'config' not in kwargs:
+ kwargs['config'] = {'log_type': 'simple',
+ 'log_level': 'error'}
+ if 'initial_config_file' not in kwargs:
+ kwargs['initial_config_file'] = 'test/test.json'
+ super(LocalesTest, self).__init__(**kwargs)
+ self.config = {}
+ self.log_obj = None
+
+class TestLocalesMixin(unittest.TestCase):
+ BASE_ABS_DIRS = ['abs_compare_locales_dir', 'abs_log_dir',
+ 'abs_upload_dir', 'abs_work_dir', 'base_work_dir']
+ def setUp(self):
+ cleanup()
+
+ def tearDown(self):
+ cleanup()
+
+ def test_query_locales_locales(self):
+ l = LocalesTest()
+ l.locales = ['a', 'b', 'c']
+ self.assertEqual(l.locales, l.query_locales())
+
+ def test_query_locales_ignore_locales(self):
+ l = LocalesTest()
+ l.config['locales'] = ['a', 'b', 'c']
+ l.config['ignore_locales'] = ['a', 'c']
+ self.assertEqual(['b'], l.query_locales())
+
+ def test_query_locales_config(self):
+ l = LocalesTest()
+ l.config['locales'] = ['a', 'b', 'c']
+ self.assertEqual(l.config['locales'], l.query_locales())
+
+ def test_query_locales_json(self):
+ l = LocalesTest()
+ l.config['locales_file'] = os.path.join(MH_DIR, "test/helper_files/locales.json")
+ l.config['base_work_dir'] = '.'
+ l.config['work_dir'] = '.'
+ locales = l.query_locales()
+ locales.sort()
+ self.assertEqual(ALL_LOCALES, locales)
+
+# Commented out until we can hide the FATAL output.
+# def test_query_locales_no_file(self):
+# l = LocalesTest()
+# l.config['base_work_dir'] = '.'
+# l.config['work_dir'] = '.'
+# try:
+# l.query_locales()
+# except SystemExit:
+# pass # Good
+# else:
+# self.assertTrue(False, "query_locales with no file doesn't fatal()!")
+
+ def test_parse_locales_file(self):
+ l = LocalesTest()
+ self.assertEqual(ALL_LOCALES, l.parse_locales_file(os.path.join(MH_DIR, 'test/helper_files/locales.txt')))
+
+ def _get_query_abs_dirs_obj(self):
+ l = LocalesTest()
+ l.config['base_work_dir'] = "base_work_dir"
+ l.config['work_dir'] = "work_dir"
+ return l
+
+ def test_query_abs_dirs_base(self):
+ l = self._get_query_abs_dirs_obj()
+ dirs = l.query_abs_dirs().keys()
+ dirs.sort()
+ self.assertEqual(dirs, self.BASE_ABS_DIRS)
+
+ def test_query_abs_dirs_base2(self):
+ l = self._get_query_abs_dirs_obj()
+ l.query_abs_dirs().keys()
+ dirs = l.query_abs_dirs().keys()
+ dirs.sort()
+ self.assertEqual(dirs, self.BASE_ABS_DIRS)
+
+ def test_query_abs_dirs_l10n(self):
+ l = self._get_query_abs_dirs_obj()
+ l.config['l10n_dir'] = "l10n_dir"
+ dirs = l.query_abs_dirs().keys()
+ dirs.sort()
+ expected_dirs = self.BASE_ABS_DIRS + ['abs_l10n_dir']
+ expected_dirs.sort()
+ self.assertEqual(dirs, expected_dirs)
+
+ def test_query_abs_dirs_mozilla(self):
+ l = self._get_query_abs_dirs_obj()
+ l.config['l10n_dir'] = "l10n_dir"
+ l.config['mozilla_dir'] = "mozilla_dir"
+ l.config['locales_dir'] = "locales_dir"
+ dirs = l.query_abs_dirs().keys()
+ dirs.sort()
+ expected_dirs = self.BASE_ABS_DIRS + ['abs_mozilla_dir', 'abs_locales_src_dir', 'abs_l10n_dir']
+ expected_dirs.sort()
+ self.assertEqual(dirs, expected_dirs)
+
+ def test_query_abs_dirs_objdir(self):
+ l = self._get_query_abs_dirs_obj()
+ l.config['l10n_dir'] = "l10n_dir"
+ l.config['mozilla_dir'] = "mozilla_dir"
+ l.config['locales_dir'] = "locales_dir"
+ l.config['objdir'] = "objdir"
+ dirs = l.query_abs_dirs().keys()
+ dirs.sort()
+ expected_dirs = self.BASE_ABS_DIRS + ['abs_mozilla_dir', 'abs_locales_src_dir', 'abs_l10n_dir', 'abs_objdir', 'abs_merge_dir', 'abs_locales_dir']
+ expected_dirs.sort()
+ self.assertEqual(dirs, expected_dirs)
+
+if __name__ == '__main__':
+ unittest.main()
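
Taken together, these tests imply a lookup order for LocalesMixin.query_locales(): an explicit locales attribute wins, then config['locales'] filtered by config['ignore_locales'], then a config['locales_file'] parsed from disk. A hedged sketch of the middle case, reusing the LocalesTest helper above:

    l = LocalesTest()
    l.config['locales'] = ['ar', 'be', 'de']
    l.config['ignore_locales'] = ['be']
    assert l.query_locales() == ['ar', 'de']
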
diff --git a/testing/mozharness/test/test_mozilla_blob_upload.py b/testing/mozharness/test/test_mozilla_blob_upload.py
new file mode 100644
index 000000000..4918d6c73
--- /dev/null
+++ b/testing/mozharness/test/test_mozilla_blob_upload.py
@@ -0,0 +1,103 @@
+import os
+import gc
+import unittest
+import copy
+import mock
+
+import mozharness.base.log as log
+from mozharness.base.log import ERROR
+import mozharness.base.script as script
+from mozharness.mozilla.blob_upload import BlobUploadMixin, \
+ blobupload_config_options
+
+class CleanupObj(script.ScriptMixin, log.LogMixin):
+ def __init__(self):
+ super(CleanupObj, self).__init__()
+ self.log_obj = None
+ self.config = {'log_level': ERROR}
+
+
+def cleanup():
+ gc.collect()
+ c = CleanupObj()
+ for f in ('test_logs', 'test_dir', 'tmpfile_stdout', 'tmpfile_stderr'):
+ c.rmtree(f)
+
+
+class BlobUploadScript(BlobUploadMixin, script.BaseScript):
+ config_options = copy.deepcopy(blobupload_config_options)
+ def __init__(self, **kwargs):
+ self.abs_dirs = None
+ self.set_buildbot_property = mock.Mock()
+ super(BlobUploadScript, self).__init__(
+ config_options=self.config_options,
+ **kwargs
+ )
+
+    def query_python_path(self, binary="python"):
+        # mock.Mock(return_value=x).return_value is just x; return directly.
+        if binary == "blobberc.py":
+            return '/path/to/blobberc'
+        elif binary == "python":
+            return '/path/to/python'
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(BlobUploadScript, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'],
+ 'blobber_upload_dir')
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def run_command(self, command):
+ self.command = command
+
+# TestBlobUploadMechanism {{{1
+class TestBlobUploadMechanism(unittest.TestCase):
+ # I need a log watcher helper function, here and in test_log.
+ def setUp(self):
+ cleanup()
+ self.s = None
+
+ def tearDown(self):
+ # Close the logfile handles, or windows can't remove the logs
+        if getattr(self, 's', None) is not None:
+            del self.s
+ cleanup()
+
+ def test_blob_upload_mechanism(self):
+ self.s = BlobUploadScript(config={'log_type': 'multi',
+ 'blob_upload_branch': 'test-branch',
+ 'default_blob_upload_servers':
+ ['http://blob_server.me'],
+ 'blob_uploader_auth_file':
+ os.path.abspath(__file__)},
+ initial_config_file='test/test.json')
+
+ content = "Hello world!"
+ parent_dir = self.s.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.isdir(parent_dir):
+ self.s.mkdir_p(parent_dir)
+
+ file_name = os.path.join(parent_dir, 'test_mock_blob_file')
+ self.s.write_to_file(file_name, content)
+ self.s.upload_blobber_files()
+ self.assertTrue(self.s.set_buildbot_property.called)
+
+ expected_result = ['/path/to/python', '/path/to/blobberc', '-u',
+ 'http://blob_server.me', '-a',
+ os.path.abspath(__file__), '-b', 'test-branch', '-d']
+ expected_result.append(self.s.query_abs_dirs()['abs_blob_upload_dir'])
+ expected_result += [
+ '--output-manifest',
+ os.path.join(self.s.query_abs_dirs()['abs_work_dir'], "uploaded_files.json")
+ ]
+ self.assertEqual(expected_result, self.s.command)
+
+
+# main {{{1
+if __name__ == '__main__':
+ unittest.main()
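
A hedged sketch of the configuration a BlobUploadMixin-based script consumes, mirroring the keys the test passes; the server URL and auth-file path are placeholders, not real endpoints:

    blob_config = {
        'log_type': 'multi',
        'blob_upload_branch': 'test-branch',
        'default_blob_upload_servers': ['http://blob_server.me'],
        'blob_uploader_auth_file': '/path/to/oauth.txt',
    }
    # BlobUploadScript(config=blob_config, initial_config_file='test/test.json')
    # .upload_blobber_files() then shells out to blobberc.py with -u/-a/-b/-d
    # plus --output-manifest, as the expected_result list above asserts.
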
diff --git a/testing/mozharness/test/test_mozilla_buildbot.py b/testing/mozharness/test/test_mozilla_buildbot.py
new file mode 100644
index 000000000..afc715026
--- /dev/null
+++ b/testing/mozharness/test/test_mozilla_buildbot.py
@@ -0,0 +1,62 @@
+import gc
+import unittest
+
+
+import mozharness.base.log as log
+from mozharness.base.log import ERROR
+import mozharness.base.script as script
+from mozharness.mozilla.buildbot import BuildbotMixin, TBPL_SUCCESS, \
+ TBPL_FAILURE, EXIT_STATUS_DICT
+
+
+class CleanupObj(script.ScriptMixin, log.LogMixin):
+ def __init__(self):
+ super(CleanupObj, self).__init__()
+ self.log_obj = None
+ self.config = {'log_level': ERROR}
+
+
+def cleanup():
+ gc.collect()
+ c = CleanupObj()
+ for f in ('test_logs', 'test_dir', 'tmpfile_stdout', 'tmpfile_stderr'):
+ c.rmtree(f)
+
+
+class BuildbotScript(BuildbotMixin, script.BaseScript):
+ def __init__(self, **kwargs):
+ super(BuildbotScript, self).__init__(**kwargs)
+
+
+# TestBuildbotStatus {{{1
+class TestBuildbotStatus(unittest.TestCase):
+ # I need a log watcher helper function, here and in test_log.
+ def setUp(self):
+ cleanup()
+ self.s = None
+
+ def tearDown(self):
+ # Close the logfile handles, or windows can't remove the logs
+        if getattr(self, 's', None) is not None:
+            del self.s
+ cleanup()
+
+ def test_over_max_log_size(self):
+ self.s = BuildbotScript(config={'log_type': 'multi',
+ 'buildbot_max_log_size': 200},
+ initial_config_file='test/test.json')
+ self.s.info("foo!")
+ self.s.buildbot_status(TBPL_SUCCESS)
+ self.assertEqual(self.s.return_code, EXIT_STATUS_DICT[TBPL_FAILURE])
+
+ def test_under_max_log_size(self):
+ self.s = BuildbotScript(config={'log_type': 'multi',
+ 'buildbot_max_log_size': 20000},
+ initial_config_file='test/test.json')
+ self.s.info("foo!")
+ self.s.buildbot_status(TBPL_SUCCESS)
+ self.assertEqual(self.s.return_code, EXIT_STATUS_DICT[TBPL_SUCCESS])
+
+# main {{{1
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozharness/test/test_mozilla_release.py b/testing/mozharness/test/test_mozilla_release.py
new file mode 100644
index 000000000..adbe322c4
--- /dev/null
+++ b/testing/mozharness/test/test_mozilla_release.py
@@ -0,0 +1,42 @@
+import unittest
+from mozharness.mozilla.release import get_previous_version
+
+
+class TestGetPreviousVersion(unittest.TestCase):
+ def testESR(self):
+ self.assertEquals(
+ '31.5.3esr',
+ get_previous_version('31.6.0esr',
+ ['31.5.3esr', '31.5.2esr', '31.4.0esr']))
+
+ def testReleaseBuild1(self):
+ self.assertEquals(
+ '36.0.4',
+ get_previous_version('37.0', ['36.0.4', '36.0.1', '35.0.1']))
+
+ def testReleaseBuild2(self):
+ self.assertEquals(
+ '36.0.4',
+ get_previous_version('37.0',
+ ['37.0', '36.0.4', '36.0.1', '35.0.1']))
+
+ def testBetaMidCycle(self):
+ self.assertEquals(
+ '37.0b4',
+ get_previous_version('37.0b5', ['37.0b4', '37.0b3']))
+
+ def testBetaEarlyCycle(self):
+ # 37.0 is the RC build
+ self.assertEquals(
+ '38.0b1',
+ get_previous_version('38.0b2', ['38.0b1', '37.0']))
+
+ def testBetaFirstInCycle(self):
+ self.assertEquals(
+ '37.0',
+ get_previous_version('38.0b1', ['37.0', '37.0b7']))
+
+ def testTwoDots(self):
+ self.assertEquals(
+ '37.1.0',
+ get_previous_version('38.0b1', ['37.1.0', '36.0']))
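
A hedged usage sketch for get_previous_version; the expected values are lifted directly from the cases above. It returns the highest candidate strictly below the version being shipped, and an RC such as 37.0 outranks the betas that preceded it:

    from mozharness.mozilla.release import get_previous_version

    assert get_previous_version(
        '37.0', ['37.0', '36.0.4', '36.0.1', '35.0.1']) == '36.0.4'
    assert get_previous_version(
        '31.6.0esr', ['31.5.3esr', '31.5.2esr', '31.4.0esr']) == '31.5.3esr'
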
diff --git a/testing/mozharness/tox.ini b/testing/mozharness/tox.ini
new file mode 100644
index 000000000..e2e1c3009
--- /dev/null
+++ b/testing/mozharness/tox.ini
@@ -0,0 +1,27 @@
+[tox]
+envlist = py27-hg3.7
+
+[base]
+deps =
+ coverage
+ nose
+ rednose
+
+[testenv]
+basepython = python2.7
+setenv =
+ HGRCPATH = {toxinidir}/test/hgrc
+
+commands =
+ coverage run --source configs,mozharness,scripts --branch {envbindir}/nosetests -v --with-xunit --rednose --force-color {posargs}
+
+[testenv:py27-hg3.7]
+deps =
+ {[base]deps}
+ mercurial==3.7.3
+
+[testenv:py27-coveralls]
+deps=
+ python-coveralls==2.4.3
+commands=
+ coveralls
diff --git a/testing/mozharness/unit.sh b/testing/mozharness/unit.sh
new file mode 100755
index 000000000..a4a27a837
--- /dev/null
+++ b/testing/mozharness/unit.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+###########################################################################
+# This requires coverage and nosetests:
+#
+# pip install -r requirements.txt
+#
+# test_base_vcs_mercurial.py requires hg >= 1.6.0 with mq, rebase, share
+# extensions to fully test.
+###########################################################################
+
+COVERAGE_ARGS="--omit='/usr/*,/opt/*'"
+OS_TYPE='linux'
+uname -v | grep -q Darwin
+if [ $? -eq 0 ] ; then
+ OS_TYPE='osx'
+ COVERAGE_ARGS="--omit='/Library/*,/usr/*,/opt/*'"
+fi
+uname -s | egrep -q MINGW32  # Cygwin will be treated as linux in this case
+if [ $? -eq 0 ] ; then
+ OS_TYPE='windows'
+fi
+NOSETESTS=`env which nosetests`
+
+echo "### Finding mozharness/ .py files..."
+files=`find mozharness -name [a-z]\*.py`
+if [ $OS_TYPE == 'windows' ] ; then
+ MOZHARNESS_PY_FILES=""
+ for f in $files; do
+ file $f | grep -q "Assembler source"
+ if [ $? -ne 0 ] ; then
+ MOZHARNESS_PY_FILES="$MOZHARNESS_PY_FILES $f"
+ fi
+ done
+else
+ MOZHARNESS_PY_FILES=$files
+fi
+echo "### Finding scripts/ .py files..."
+files=`find scripts -name [a-z]\*.py`
+if [ $OS_TYPE == 'windows' ] ; then
+ SCRIPTS_PY_FILES=""
+ for f in $files; do
+ file $f | grep -q "Assembler source"
+ if [ $? -ne 0 ] ; then
+ SCRIPTS_PY_FILES="$SCRIPTS_PY_FILES $f"
+ fi
+ done
+else
+ SCRIPTS_PY_FILES=$files
+fi
+export PYTHONPATH=`env pwd`:$PYTHONPATH
+
+echo "### Running pyflakes"
+pyflakes $MOZHARNESS_PY_FILES $SCRIPTS_PY_FILES | grep -v "local variable 'url' is assigned to" | grep -v "redefinition of unused 'json'" | egrep -v "mozharness/mozilla/testing/mozpool\.py.*undefined name 'requests'"
+
+echo "### Running pylint"
+pylint -E -e F -f parseable $MOZHARNESS_PY_FILES $SCRIPTS_PY_FILES 2>&1 | egrep -v '(No config file found, using default configuration|Instance of .* has no .* member|Unable to import .devicemanager|Undefined variable .DMError|Module .hashlib. has no .sha512. member)'
+
+rm -rf build logs
+if [ $OS_TYPE != 'windows' ] ; then
+ echo "### Testing non-networked unit tests"
+ coverage run -a --branch $COVERAGE_ARGS $NOSETESTS test/test_*.py
+ echo "### Running *.py [--list-actions]"
+ for filename in $MOZHARNESS_PY_FILES; do
+ coverage run -a --branch $COVERAGE_ARGS $filename
+ done
+ for filename in $SCRIPTS_PY_FILES ; do
+ coverage run -a --branch $COVERAGE_ARGS $filename --list-actions > /dev/null
+ done
+ echo "### Running scripts/configtest.py --log-level warning"
+ coverage run -a --branch $COVERAGE_ARGS scripts/configtest.py --log-level warning
+
+ echo "### Creating coverage html"
+ coverage html $COVERAGE_ARGS -d coverage.new
+ if [ -e coverage ] ; then
+ mv coverage coverage.old
+ mv coverage.new coverage
+ rm -rf coverage.old
+ else
+ mv coverage.new coverage
+ fi
+else
+ echo "### Running nosetests..."
+ nosetests test/
+fi
+rm -rf build logs