summaryrefslogtreecommitdiffstats
path: root/media/webrtc/trunk/build
diff options
context:
space:
mode:
Diffstat (limited to 'media/webrtc/trunk/build')
-rw-r--r--media/webrtc/trunk/build/OWNERS1
-rw-r--r--media/webrtc/trunk/build/README.chromium15
-rw-r--r--media/webrtc/trunk/build/all.gyp716
-rw-r--r--media/webrtc/trunk/build/all_android.gyp115
-rwxr-xr-xmedia/webrtc/trunk/build/android/adb_device_functions.sh139
-rwxr-xr-xmedia/webrtc/trunk/build/android/adb_kill_content_shell24
-rwxr-xr-xmedia/webrtc/trunk/build/android/adb_logcat_monitor.py155
-rwxr-xr-xmedia/webrtc/trunk/build/android/adb_logcat_printer.py202
-rwxr-xr-xmedia/webrtc/trunk/build/android/adb_run_content_shell14
-rw-r--r--media/webrtc/trunk/build/android/ant/chromium-jars.xml97
-rw-r--r--media/webrtc/trunk/build/android/ant/common.xml90
-rw-r--r--media/webrtc/trunk/build/android/ant/sdk-targets.xml284
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_fyi_builder.sh9
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_fyi_tester.sh9
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_main.sh9
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_try_builder.sh9
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_try_compile.sh9
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_try_tester.sh9
-rwxr-xr-xmedia/webrtc/trunk/build/android/buildbot_webkit_main.sh9
-rw-r--r--media/webrtc/trunk/build/android/cpufeatures.gypi6
-rwxr-xr-xmedia/webrtc/trunk/build/android/device_stats_monitor.py43
-rwxr-xr-xmedia/webrtc/trunk/build/android/device_status_check.py170
-rwxr-xr-xmedia/webrtc/trunk/build/android/emulator.py321
-rwxr-xr-xmedia/webrtc/trunk/build/android/enable_asserts.py31
-rwxr-xr-xmedia/webrtc/trunk/build/android/envsetup.sh133
-rwxr-xr-xmedia/webrtc/trunk/build/android/envsetup_functions.sh334
-rwxr-xr-xmedia/webrtc/trunk/build/android/gdb_apk171
-rwxr-xr-xmedia/webrtc/trunk/build/android/gdb_content_shell15
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/base_unittests_disabled29
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/base_unittests_emulator_additional_disabled10
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/content_unittests_disabled21
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/ipc_tests_disabled14
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/media_unittests_disabled11
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/net_unittests_disabled124
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/sync_unit_tests_disabled4
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/ui_unittests_disabled44
-rw-r--r--media/webrtc/trunk/build/android/gtest_filter/unit_tests_disabled149
-rwxr-xr-xmedia/webrtc/trunk/build/android/lighttpd_server.py253
-rw-r--r--media/webrtc/trunk/build/android/pylib/__init__.py4
-rw-r--r--media/webrtc/trunk/build/android/pylib/android_commands.py1071
-rw-r--r--media/webrtc/trunk/build/android/pylib/apk_info.py142
-rw-r--r--media/webrtc/trunk/build/android/pylib/base_test_runner.py210
-rw-r--r--media/webrtc/trunk/build/android/pylib/base_test_sharder.py113
-rw-r--r--media/webrtc/trunk/build/android/pylib/buildbot_report.py46
-rw-r--r--media/webrtc/trunk/build/android/pylib/chrome_test_server_spawner.py402
-rw-r--r--media/webrtc/trunk/build/android/pylib/cmd_helper.py50
-rw-r--r--media/webrtc/trunk/build/android/pylib/constants.py47
-rw-r--r--media/webrtc/trunk/build/android/pylib/debug_info.py196
-rw-r--r--media/webrtc/trunk/build/android/pylib/device_stats_monitor.html143
-rw-r--r--media/webrtc/trunk/build/android/pylib/device_stats_monitor.py116
-rw-r--r--media/webrtc/trunk/build/android/pylib/fake_dns.py63
-rw-r--r--media/webrtc/trunk/build/android/pylib/flag_changer.py144
-rw-r--r--media/webrtc/trunk/build/android/pylib/forwarder.py198
-rw-r--r--media/webrtc/trunk/build/android/pylib/io_stats_parser.py32
-rw-r--r--media/webrtc/trunk/build/android/pylib/java_unittest_utils.py27
-rw-r--r--media/webrtc/trunk/build/android/pylib/json_perf_parser.py160
-rw-r--r--media/webrtc/trunk/build/android/pylib/perf_tests_helper.py120
-rw-r--r--media/webrtc/trunk/build/android/pylib/ports.py167
-rw-r--r--media/webrtc/trunk/build/android/pylib/python_test_base.py168
-rw-r--r--media/webrtc/trunk/build/android/pylib/python_test_caller.py84
-rw-r--r--media/webrtc/trunk/build/android/pylib/python_test_sharder.py203
-rw-r--r--media/webrtc/trunk/build/android/pylib/run_java_tests.py591
-rw-r--r--media/webrtc/trunk/build/android/pylib/run_python_tests.py207
-rw-r--r--media/webrtc/trunk/build/android/pylib/run_tests_helper.py26
-rw-r--r--media/webrtc/trunk/build/android/pylib/sharded_tests_queue.py35
-rw-r--r--media/webrtc/trunk/build/android/pylib/single_test_runner.py343
-rw-r--r--media/webrtc/trunk/build/android/pylib/test_info_collection.py137
-rw-r--r--media/webrtc/trunk/build/android/pylib/test_options_parser.py143
-rw-r--r--media/webrtc/trunk/build/android/pylib/test_package.py200
-rw-r--r--media/webrtc/trunk/build/android/pylib/test_package_apk.py121
-rw-r--r--media/webrtc/trunk/build/android/pylib/test_package_executable.py167
-rw-r--r--media/webrtc/trunk/build/android/pylib/test_result.py193
-rw-r--r--media/webrtc/trunk/build/android/pylib/tests_annotations.py89
-rw-r--r--media/webrtc/trunk/build/android/pylib/valgrind_tools.py255
-rwxr-xr-xmedia/webrtc/trunk/build/android/run_instrumentation_tests.py92
-rwxr-xr-xmedia/webrtc/trunk/build/android/run_tests.py450
-rw-r--r--media/webrtc/trunk/build/apk_test.gypi75
-rwxr-xr-xmedia/webrtc/trunk/build/apply_locales.py45
-rw-r--r--media/webrtc/trunk/build/asan.saves23
-rwxr-xr-xmedia/webrtc/trunk/build/branding_value.sh51
-rw-r--r--media/webrtc/trunk/build/build_config.h201
-rw-r--r--media/webrtc/trunk/build/common.croc127
-rw-r--r--media/webrtc/trunk/build/common.gypi3669
-rw-r--r--media/webrtc/trunk/build/common_untrusted.gypi29
-rwxr-xr-xmedia/webrtc/trunk/build/compiler_version.py76
-rw-r--r--media/webrtc/trunk/build/copy_test_data_ios.gypi48
-rwxr-xr-xmedia/webrtc/trunk/build/copy_test_data_ios.py104
-rwxr-xr-xmedia/webrtc/trunk/build/cp.py22
-rwxr-xr-xmedia/webrtc/trunk/build/dir_exists.py15
-rwxr-xr-xmedia/webrtc/trunk/build/download_nacl_toolchains.py64
-rwxr-xr-xmedia/webrtc/trunk/build/escape_unicode.py56
-rwxr-xr-xmedia/webrtc/trunk/build/extract_from_cab.py63
-rw-r--r--media/webrtc/trunk/build/filename_rules.gypi96
-rwxr-xr-xmedia/webrtc/trunk/build/gdb-add-index47
-rw-r--r--media/webrtc/trunk/build/grit_action.gypi33
-rw-r--r--media/webrtc/trunk/build/grit_target.gypi30
-rwxr-xr-xmedia/webrtc/trunk/build/gyp_chromium175
-rwxr-xr-xmedia/webrtc/trunk/build/install-build-deps-android.sh101
-rwxr-xr-xmedia/webrtc/trunk/build/install-build-deps.sh414
-rwxr-xr-xmedia/webrtc/trunk/build/install-chroot.sh809
-rw-r--r--media/webrtc/trunk/build/internal/README.chromium24
-rw-r--r--media/webrtc/trunk/build/internal/release_defaults.gypi18
-rw-r--r--media/webrtc/trunk/build/internal/release_impl.gypi17
-rw-r--r--media/webrtc/trunk/build/internal/release_impl_official.gypi43
-rwxr-xr-xmedia/webrtc/trunk/build/ios/clean_env.py77
-rw-r--r--media/webrtc/trunk/build/ios/mac_build.gypi79
-rw-r--r--media/webrtc/trunk/build/jar_file_jni_generator.gypi53
-rw-r--r--media/webrtc/trunk/build/java.gypi90
-rw-r--r--media/webrtc/trunk/build/java_aidl.gypi58
-rw-r--r--media/webrtc/trunk/build/jni_generator.gypi58
-rw-r--r--media/webrtc/trunk/build/json_schema_bundle_compile.gypi62
-rw-r--r--media/webrtc/trunk/build/json_schema_compile.gypi110
-rw-r--r--media/webrtc/trunk/build/linux/chrome_linux.croc29
-rwxr-xr-xmedia/webrtc/trunk/build/linux/dump_app_syms36
-rwxr-xr-xmedia/webrtc/trunk/build/linux/pkg-config-wrapper47
-rwxr-xr-xmedia/webrtc/trunk/build/linux/python_arch.sh42
-rwxr-xr-xmedia/webrtc/trunk/build/linux/rewrite_dirs.py71
-rw-r--r--media/webrtc/trunk/build/linux/system.gyp637
-rw-r--r--media/webrtc/trunk/build/mac/OWNERS2
-rwxr-xr-xmedia/webrtc/trunk/build/mac/change_mach_o_flags.py273
-rwxr-xr-xmedia/webrtc/trunk/build/mac/change_mach_o_flags_from_xcode.sh15
-rw-r--r--media/webrtc/trunk/build/mac/chrome_mac.croc36
-rwxr-xr-xmedia/webrtc/trunk/build/mac/copy_framework_unversioned.sh118
-rwxr-xr-xmedia/webrtc/trunk/build/mac/find_sdk.py83
-rwxr-xr-xmedia/webrtc/trunk/build/mac/make_more_helpers.sh91
-rwxr-xr-xmedia/webrtc/trunk/build/mac/strip_from_xcode62
-rwxr-xr-xmedia/webrtc/trunk/build/mac/strip_save_dsym341
-rwxr-xr-xmedia/webrtc/trunk/build/mac/tweak_info_plist.py293
-rwxr-xr-xmedia/webrtc/trunk/build/mac/verify_no_objc.sh43
-rw-r--r--media/webrtc/trunk/build/nocompile.gypi96
-rw-r--r--media/webrtc/trunk/build/output_dll_copy.rules17
-rw-r--r--media/webrtc/trunk/build/precompile.cc7
-rw-r--r--media/webrtc/trunk/build/precompile.h108
-rw-r--r--media/webrtc/trunk/build/protoc.gypi116
-rw-r--r--media/webrtc/trunk/build/release.gypi17
-rwxr-xr-xmedia/webrtc/trunk/build/sanitize-mac-build-log.sed35
-rwxr-xr-xmedia/webrtc/trunk/build/sanitize-mac-build-log.sh6
-rwxr-xr-xmedia/webrtc/trunk/build/sanitize-win-build-log.sed17
-rwxr-xr-xmedia/webrtc/trunk/build/sanitize-win-build-log.sh6
-rw-r--r--media/webrtc/trunk/build/some.gyp24
-rw-r--r--media/webrtc/trunk/build/temp_gyp/README.chromium3
-rw-r--r--media/webrtc/trunk/build/temp_gyp/googleurl.gyp105
-rw-r--r--media/webrtc/trunk/build/temp_gyp/pdfsqueeze.gyp40
-rwxr-xr-xmedia/webrtc/trunk/build/update-linux-sandbox.sh75
-rwxr-xr-xmedia/webrtc/trunk/build/util/lastchange.py230
-rw-r--r--media/webrtc/trunk/build/whitespace_file.txt69
-rw-r--r--media/webrtc/trunk/build/win/chrome_win.croc26
-rwxr-xr-xmedia/webrtc/trunk/build/win/install-build-deps.py47
-rw-r--r--media/webrtc/trunk/build/win/setup_cygwin_mount.py20
-rw-r--r--media/webrtc/trunk/build/win_precompile.gypi20
150 files changed, 21118 insertions, 0 deletions
diff --git a/media/webrtc/trunk/build/OWNERS b/media/webrtc/trunk/build/OWNERS
new file mode 100644
index 000000000..72e8ffc0d
--- /dev/null
+++ b/media/webrtc/trunk/build/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/media/webrtc/trunk/build/README.chromium b/media/webrtc/trunk/build/README.chromium
new file mode 100644
index 000000000..012df35c7
--- /dev/null
+++ b/media/webrtc/trunk/build/README.chromium
@@ -0,0 +1,15 @@
+List of property sheets to be included by projects:
+ common.vsprops
+ Not used anymore. No-op. Kept for compatibility with current projects.
+
+ debug.vsprops
+ Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
+
+ external_code.vsprops
+ Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
+
+ output_dll_copy.rules
+ Run to enable automatic copying of DLLs when they are used as an input file in a vcproj project.
+
+ release.vsprops
+ Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependent on the CHROME_BUILD_TYPE environment variable.
diff --git a/media/webrtc/trunk/build/all.gyp b/media/webrtc/trunk/build/all.gyp
new file mode 100644
index 000000000..f402ef233
--- /dev/null
+++ b/media/webrtc/trunk/build/all.gyp
@@ -0,0 +1,716 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'All',
+ 'type': 'none',
+ 'xcode_create_dependents_test_runner': 1,
+ 'dependencies': [
+ 'some.gyp:*',
+ '../base/base.gyp:*',
+ '../content/content.gyp:*',
+ '../crypto/crypto.gyp:*',
+ '../media/media.gyp:*',
+ '../net/net.gyp:*',
+ '../sdch/sdch.gyp:*',
+ '../sql/sql.gyp:*',
+ '../testing/gmock.gyp:*',
+ '../testing/gtest.gyp:*',
+ '../third_party/bzip2/bzip2.gyp:*',
+ '../third_party/libxml/libxml.gyp:*',
+ '../third_party/sqlite/sqlite.gyp:*',
+ '../third_party/zlib/zlib.gyp:*',
+ '../ui/ui.gyp:*',
+ '../webkit/support/webkit_support.gyp:*',
+ 'temp_gyp/googleurl.gyp:*',
+ ],
+ 'conditions': [
+ ['OS!="ios"', {
+ 'dependencies': [
+ '../cc/cc_tests.gyp:*',
+ '../chrome/chrome.gyp:*',
+ '../gpu/gpu.gyp:*',
+ '../gpu/tools/tools.gyp:*',
+ '../ipc/ipc.gyp:*',
+ '../jingle/jingle.gyp:*',
+ '../ppapi/ppapi.gyp:*',
+ '../ppapi/ppapi_internal.gyp:*',
+ '../printing/printing.gyp:*',
+ '../skia/skia.gyp:*',
+ '../sync/sync.gyp:*',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
+ '../third_party/cld/cld.gyp:*',
+ '../third_party/codesighs/codesighs.gyp:*',
+ '../third_party/ffmpeg/ffmpeg.gyp:*',
+ '../third_party/iccjpeg/iccjpeg.gyp:*',
+ '../third_party/libpng/libpng.gyp:*',
+ '../third_party/libusb/libusb.gyp:*',
+ '../third_party/libwebp/libwebp.gyp:*',
+ '../third_party/libxslt/libxslt.gyp:*',
+ '../third_party/lzma_sdk/lzma_sdk.gyp:*',
+ '../third_party/mesa/mesa.gyp:*',
+ '../third_party/modp_b64/modp_b64.gyp:*',
+ '../third_party/npapi/npapi.gyp:*',
+ '../third_party/ots/ots.gyp:*',
+ '../third_party/qcms/qcms.gyp:*',
+ '../third_party/re2/re2.gyp:re2',
+ '../third_party/WebKit/Source/WebKit/chromium/All.gyp:*',
+ '../v8/tools/gyp/v8.gyp:*',
+ '../webkit/compositor_bindings/compositor_bindings_tests.gyp:*',
+ '../webkit/webkit.gyp:*',
+ '<(libjpeg_gyp_path):*',
+ ],
+ }],
+ ['os_posix==1 and OS!="android" and OS!="ios"', {
+ 'dependencies': [
+ '../third_party/yasm/yasm.gyp:*#host',
+ ],
+ }],
+ ['OS=="mac" or OS=="ios" or OS=="win"', {
+ 'dependencies': [
+ '../third_party/nss/nss.gyp:*',
+ ],
+ }],
+ ['OS=="win" or OS=="ios" or OS=="linux"', {
+ 'dependencies': [
+ '../breakpad/breakpad.gyp:*',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'dependencies': [
+ '../third_party/ocmock/ocmock.gyp:*',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'dependencies': [
+ '../courgette/courgette.gyp:*',
+ '../dbus/dbus.gyp:*',
+ '../sandbox/sandbox.gyp:*',
+ ],
+ 'conditions': [
+ ['branding=="Chrome"', {
+ 'dependencies': [
+ '../chrome/chrome.gyp:linux_packages_<(channel)',
+ ],
+ }],
+ ['chromeos==0', {
+ 'dependencies': [
+ '../third_party/cros_dbus_cplusplus/cros_dbus_cplusplus.gyp:*',
+ ],
+ }],
+ ],
+ }],
+ ['(toolkit_uses_gtk==1) and (build_with_mozilla==0)', {
+ 'dependencies': [
+ '../tools/gtk_clipboard_dump/gtk_clipboard_dump.gyp:*',
+ '../tools/xdisplaycheck/xdisplaycheck.gyp:*',
+ ],
+ }],
+ ['OS=="win"', {
+ 'conditions': [
+ ['win_use_allocator_shim==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:*',
+ ],
+ }],
+ ],
+ 'dependencies': [
+ '../chrome_frame/chrome_frame.gyp:*',
+ '../cloud_print/cloud_print.gyp:*',
+ '../courgette/courgette.gyp:*',
+ '../rlz/rlz.gyp:*',
+ '../sandbox/sandbox.gyp:*',
+ '../third_party/angle/src/build_angle.gyp:*',
+ '../third_party/bsdiff/bsdiff.gyp:*',
+ '../third_party/bspatch/bspatch.gyp:*',
+ '../third_party/gles2_book/gles2_book.gyp:*',
+ '../tools/memory_watcher/memory_watcher.gyp:*',
+ ],
+ }, {
+ 'dependencies': [
+ '../third_party/libevent/libevent.gyp:*',
+ ],
+ }],
+ ['toolkit_views==1', {
+ 'dependencies': [
+ '../ui/views/controls/webview/webview.gyp:*',
+ '../ui/views/views.gyp:*',
+ ],
+ }],
+ ['use_aura==1', {
+ 'dependencies': [
+ '../ash/ash.gyp:*',
+ '../ui/aura/aura.gyp:*',
+ '../ui/oak/oak.gyp:*',
+ ],
+ }],
+ ['remoting==1', {
+ 'dependencies': [
+ '../remoting/remoting.gyp:*',
+ ],
+ }],
+ ['use_openssl==0', {
+ 'dependencies': [
+ '../net/third_party/nss/ssl.gyp:*',
+ ],
+ }],
+ ],
+ }, # target_name: All
+ {
+ 'target_name': 'All_syzygy',
+ 'type': 'none',
+ 'conditions': [
+ ['OS=="win" and fastbuild==0', {
+ 'dependencies': [
+ '../chrome/installer/mini_installer_syzygy.gyp:*',
+ ],
+ },
+ ],
+ ],
+ }, # target_name: All_syzygy
+ {
+ 'target_name': 'chromium_builder_tests',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/base.gyp:base_unittests',
+ '../crypto/crypto.gyp:crypto_unittests',
+ '../media/media.gyp:media_unittests',
+ '../net/net.gyp:net_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../ui/ui.gyp:ui_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ 'conditions': [
+ ['OS!="ios"', {
+ 'dependencies': [
+ '../cc/cc_tests.gyp:cc_unittests',
+ '../chrome/chrome.gyp:browser_tests',
+ '../chrome/chrome.gyp:interactive_ui_tests',
+ '../chrome/chrome.gyp:safe_browsing_tests',
+ '../chrome/chrome.gyp:sync_integration_tests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_unittests',
+ '../gpu/gpu.gyp:gpu_unittests',
+ '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+ ],
+ }],
+ ['OS=="win"', {
+ 'dependencies': [
+ '../chrome/chrome.gyp:installer_util_unittests',
+ '../chrome/chrome.gyp:mini_installer_test',
+ # mini_installer_tests depends on mini_installer. This should be
+ # defined in installer.gyp.
+ '../chrome/installer/mini_installer.gyp:mini_installer',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+ '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+ '../courgette/courgette.gyp:courgette_unittests',
+ '../sandbox/sandbox.gyp:sbox_integration_tests',
+ '../sandbox/sandbox.gyp:sbox_unittests',
+ '../sandbox/sandbox.gyp:sbox_validation_tests',
+ '../webkit/webkit.gyp:pull_in_copy_TestNetscapePlugIn',
+ '../ui/views/views.gyp:views_unittests',
+ '../webkit/webkit.gyp:test_shell_common',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'dependencies': [
+ '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+ ],
+ }],
+ ],
+ }, # target_name: chromium_builder_tests
+ {
+ 'target_name': 'chromium_2010_builder_tests',
+ 'type': 'none',
+ 'dependencies': [
+ 'chromium_builder_tests',
+ ],
+ }, # target_name: chromium_2010_builder_tests
+ ],
+ 'conditions': [
+ ['OS!="ios"', {
+ 'targets': [
+ {
+ 'target_name': 'chromium_builder_nacl_win_integration',
+ 'type': 'none',
+ 'dependencies': [
+ 'chromium_builder_qa', # needed for pyauto
+ 'chromium_builder_tests',
+ ],
+ }, # target_name: chromium_builder_nacl_win_integration
+ {
+ 'target_name': 'chromium_builder_perf',
+ 'type': 'none',
+ 'dependencies': [
+ 'chromium_builder_qa', # needed for pyauto
+ '../chrome/chrome.gyp:performance_browser_tests',
+ '../chrome/chrome.gyp:performance_ui_tests',
+ '../chrome/chrome.gyp:sync_performance_tests',
+ ],
+ }, # target_name: chromium_builder_perf
+ {
+ 'target_name': 'chromium_gpu_builder',
+ 'type': 'none',
+ 'dependencies': [
+ '../chrome/chrome.gyp:gpu_tests',
+ '../chrome/chrome.gyp:performance_browser_tests',
+ '../chrome/chrome.gyp:performance_ui_tests',
+ '../gpu/gpu.gyp:gl_tests',
+ ],
+ 'conditions': [
+ ['internal_gles2_conform_tests', {
+ 'dependencies': [
+ '../gpu/gles2_conform_test/gles2_conform_test.gyp:gles2_conform_test',
+ ],
+ }], # internal_gles2_conform
+ ],
+ }, # target_name: chromium_gpu_builder
+ {
+ 'target_name': 'chromium_gpu_debug_builder',
+ 'type': 'none',
+ 'dependencies': [
+ '../chrome/chrome.gyp:gpu_tests',
+ '../gpu/gpu.gyp:gl_tests',
+ ],
+ 'conditions': [
+ ['internal_gles2_conform_tests', {
+ 'dependencies': [
+ '../gpu/gles2_conform_test/gles2_conform_test.gyp:gles2_conform_test',
+ ],
+ }], # internal_gles2_conform
+ ],
+ }, # target_name: chromium_gpu_debug_builder
+ {
+ 'target_name': 'chromium_builder_qa',
+ 'type': 'none',
+ 'dependencies': [
+ '../chrome/chrome.gyp:chromedriver',
+ # Dependencies of pyauto_functional tests.
+ '../remoting/remoting.gyp:remoting_webapp',
+ ],
+# not needed for Mozilla
+# 'conditions': [
+# # If you change this condition, make sure you also change it
+# # in chrome_tests.gypi
+# ['enable_automation==1 and (OS=="mac" or OS=="win" or (os_posix==1 and target_arch==python_arch))', {
+# 'dependencies': [
+# '../chrome/chrome.gyp:pyautolib',
+# ],
+# }],
+ ['OS=="mac"', {
+ 'dependencies': [
+ '../remoting/remoting.gyp:remoting_me2me_host_archive',
+ ],
+ }],
+ ['OS=="win" and component != "shared_library" and wix_exists == "True" and sas_dll_exists == "True"', {
+ 'dependencies': [
+ '../remoting/remoting.gyp:remoting_host_installation',
+ ],
+ }],
+ ],
+ }, # target_name: chromium_builder_qa
+ {
+ 'target_name': 'chromium_builder_perf_av',
+ 'type': 'none',
+ 'dependencies': [
+ 'chromium_builder_qa', # needed for perf pyauto tests
+ '../webkit/webkit.gyp:pull_in_DumpRenderTree', # to run layout tests
+ ],
+ }, # target_name: chromium_builder_perf_av
+ ], # targets
+ }],
+ ['OS=="mac"', {
+ 'targets': [
+ {
+ # Target to build everything plus the dmg. We don't put the dmg
+ # in the All target because developers really don't need it.
+ 'target_name': 'all_and_dmg',
+ 'type': 'none',
+ 'dependencies': [
+ 'All',
+ '../chrome/chrome.gyp:build_app_dmg',
+ ],
+ },
+ # These targets are here so the build bots can use them to build
+ # subsets of a full tree for faster cycle times.
+ {
+ 'target_name': 'chromium_builder_dbg',
+ 'type': 'none',
+ 'dependencies': [
+ '../cc/cc_tests.gyp:cc_unittests',
+ '../chrome/chrome.gyp:browser_tests',
+ '../chrome/chrome.gyp:interactive_ui_tests',
+ '../chrome/chrome.gyp:safe_browsing_tests',
+ '../chrome/chrome.gyp:sync_integration_tests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_unittests',
+ '../ui/ui.gyp:ui_unittests',
+ '../gpu/gpu.gyp:gpu_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
+ '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../rlz/rlz.gyp:*',
+ '../sql/sql.gyp:sql_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_rel',
+ 'type': 'none',
+ 'dependencies': [
+ '../cc/cc_tests.gyp:cc_unittests',
+ '../chrome/chrome.gyp:browser_tests',
+ '../chrome/chrome.gyp:performance_browser_tests',
+ '../chrome/chrome.gyp:performance_ui_tests',
+ '../chrome/chrome.gyp:safe_browsing_tests',
+ '../chrome/chrome.gyp:sync_integration_tests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_unittests',
+ '../ui/ui.gyp:ui_unittests',
+ '../gpu/gpu.gyp:gpu_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
+ '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_dbg_tsan_mac',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/base.gyp:base_unittests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../crypto/crypto.gyp:crypto_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ '../net/net.gyp:net_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_asan_mac',
+ 'type': 'none',
+ 'dependencies': [
+ '../chrome/chrome.gyp:chrome',
+ '../net/net.gyp:dns_fuzz_stub',
+ '../webkit/webkit.gyp:pull_in_DumpRenderTree',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_dbg_valgrind_mac',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/base.gyp:base_unittests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../crypto/crypto.gyp:crypto_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../media/media.gyp:media_unittests',
+ '../net/net.gyp:net_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../chrome/chrome.gyp:safe_browsing_tests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../content/content.gyp:content_unittests',
+ '../ui/ui.gyp:ui_unittests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ },
+ ], # targets
+ }], # OS="mac"
+ ['OS=="win"', {
+ 'targets': [
+ # These targets are here so the build bots can use them to build
+ # subsets of a full tree for faster cycle times.
+ {
+ 'target_name': 'chromium_builder',
+ 'type': 'none',
+ 'dependencies': [
+ '../cc/cc_tests.gyp:cc_unittests',
+ '../chrome/chrome.gyp:browser_tests',
+ '../chrome/chrome.gyp:installer_util_unittests',
+ '../chrome/chrome.gyp:interactive_ui_tests',
+ '../chrome/chrome.gyp:mini_installer_test',
+ '../chrome/chrome.gyp:performance_browser_tests',
+ '../chrome/chrome.gyp:performance_ui_tests',
+ '../chrome/chrome.gyp:safe_browsing_tests',
+ '../chrome/chrome.gyp:sync_integration_tests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_unittests',
+ # mini_installer_tests depends on mini_installer. This should be
+ # defined in installer.gyp.
+ '../chrome/installer/mini_installer.gyp:mini_installer',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+ '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+ '../courgette/courgette.gyp:courgette_unittests',
+ '../ui/ui.gyp:ui_unittests',
+ '../gpu/gpu.gyp:gpu_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
+ '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ '../ui/views/views.gyp:views_unittests',
+ '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+ '../webkit/webkit.gyp:pull_in_copy_TestNetscapePlugIn',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_win_cf',
+ 'type': 'none',
+ 'dependencies': [
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+ '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+ '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_dbg_tsan_win',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/base.gyp:base_unittests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../content/content.gyp:content_unittests',
+ '../crypto/crypto.gyp:crypto_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
+ '../net/net.gyp:net_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_dbg_drmemory_win',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/base.gyp:base_unittests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+ '../content/content.gyp:content_unittests',
+ '../crypto/crypto.gyp:crypto_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../jingle/jingle.gyp:jingle_unittests',
+ '../media/media.gyp:media_unittests',
+ '../net/net.gyp:net_unittests',
+ '../printing/printing.gyp:printing_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+ 'temp_gyp/googleurl.gyp:googleurl_unittests',
+ ],
+ },
+ {
+ 'target_name': 'webkit_builder_win',
+ 'type': 'none',
+ 'dependencies': [
+ '../webkit/webkit.gyp:test_shell',
+ '../webkit/webkit.gyp:test_shell_tests',
+ '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+ '../webkit/webkit.gyp:pull_in_DumpRenderTree',
+ ],
+ },
+ {
+ 'target_name': 'chromium_builder_qa_nacl_win64',
+ 'type': 'none',
+ 'dependencies': [
+ 'chromium_builder_qa', # needed for pyauto
+ '../chrome/chrome.gyp:chrome_nacl_win64',
+ ],
+ }, # target_name: chromium_builder_qa_nacl_win64
+ ], # targets
+ 'conditions': [
+ ['branding=="Chrome"', {
+ 'targets': [
+ {
+ 'target_name': 'chrome_official_builder',
+ 'type': 'none',
+ 'dependencies': [
+ '../chrome/chrome.gyp:chromedriver',
+ '../chrome/chrome.gyp:crash_service',
+ '../chrome/chrome.gyp:crash_service_win64',
+ '../chrome/chrome.gyp:performance_ui_tests',
+ '../chrome/chrome.gyp:policy_templates',
+ '../chrome/chrome.gyp:pyautolib',
+ '../chrome/chrome.gyp:reliability_tests',
+ '../chrome/chrome.gyp:automated_ui_tests',
+ '../chrome/installer/mini_installer.gyp:mini_installer',
+ '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+ '../courgette/courgette.gyp:courgette',
+ '../courgette/courgette.gyp:courgette64',
+ '../cloud_print/cloud_print.gyp:cloud_print',
+ '../remoting/remoting.gyp:remoting_webapp',
+ '../third_party/adobe/flash/flash_player.gyp:flash_player',
+ '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmplugin',
+ ],
+ 'conditions': [
+ ['internal_pdf', {
+ 'dependencies': [
+ '../pdf/pdf.gyp:pdf',
+ ],
+ }], # internal_pdf
+ ['component != "shared_library" and wix_exists == "True" and \
+ sas_dll_exists == "True"', {
+ 'dependencies': [
+ '../remoting/remoting.gyp:remoting_host_installation',
+ ],
+ }], # component != "shared_library"
+ ]
+ },
+ ], # targets
+ }], # branding=="Chrome"
+ ], # conditions
+ }], # OS="win"
+ ['use_aura==1', {
+ 'targets': [
+ {
+ 'target_name': 'aura_builder',
+ 'type': 'none',
+ 'dependencies': [
+ '../cc/cc_tests.gyp:cc_unittests',
+ '../chrome/chrome.gyp:browser_tests',
+ '../chrome/chrome.gyp:chrome',
+ '../chrome/chrome.gyp:interactive_ui_tests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../content/content.gyp:content_browsertests',
+ '../content/content.gyp:content_unittests',
+ '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+ '../remoting/remoting.gyp:remoting_unittests',
+ '../ui/aura/aura.gyp:*',
+ '../ui/compositor/compositor.gyp:*',
+ '../ui/ui.gyp:ui_unittests',
+ '../ui/views/views.gyp:views',
+ '../ui/views/views.gyp:views_unittests',
+ '../webkit/compositor_bindings/compositor_bindings_tests.gyp:webkit_compositor_bindings_unittests',
+ '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ # Remove this when we have the real compositor.
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)',
+ 'files': ['../third_party/directxsdk/files/dlls/D3DX10d_43.dll']
+ },
+ ],
+ 'dependencies': [
+ '../content/content.gyp:content_unittests',
+ '../chrome/chrome.gyp:crash_service',
+ '../chrome/chrome.gyp:crash_service_win64',
+ ],
+ }],
+ ['use_ash==1', {
+ 'dependencies': [
+ '../ash/ash.gyp:ash_shell',
+ '../ash/ash.gyp:ash_unittests',
+ ],
+ }],
+ ['OS=="linux"', {
+ # Tests that currently only work on Linux.
+ 'dependencies': [
+ '../base/base.gyp:base_unittests',
+ '../content/content.gyp:content_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../sql/sql.gyp:sql_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ ],
+ }],
+ ['OS=="mac"', {
+ # Exclude dependencies that are not currently implemented.
+ 'dependencies!': [
+ '../chrome/chrome.gyp:chrome',
+ '../chrome/chrome.gyp:unit_tests',
+ '../ui/views/views.gyp:views_unittests',
+ ],
+ }],
+ ['chromeos==1', {
+ 'dependencies': [
+ '../chromeos/chromeos.gyp:chromeos_unittests',
+ ],
+ }],
+ ],
+ },
+ ], # targets
+ }], # "use_aura==1"
+ ['test_isolation_mode != "noop"', {
+ 'targets': [
+ {
+ 'target_name': 'chromium_swarm_tests',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/base.gyp:base_unittests_run',
+ '../chrome/chrome.gyp:browser_tests_run',
+ '../chrome/chrome.gyp:unit_tests_run',
+ '../net/net.gyp:net_unittests_run',
+ ],
+ }, # target_name: chromium_swarm_tests
+ ],
+ }],
+ ], # conditions
+}
diff --git a/media/webrtc/trunk/build/all_android.gyp b/media/webrtc/trunk/build/all_android.gyp
new file mode 100644
index 000000000..1b110f2de
--- /dev/null
+++ b/media/webrtc/trunk/build/all_android.gyp
@@ -0,0 +1,115 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is the all.gyp file for Android, to prevent breakage in Android and
+# other platforms; it will be churning a lot in the short term and eventually
+# be merged into all.gyp.
+
+{
+ 'variables': {
+ # A hook that can be overridden in other repositories to add additional
+ # compilation targets to 'All'
+ 'android_app_targets%': [],
+ },
+ 'targets': [
+ {
+ 'target_name': 'All',
+ 'type': 'none',
+ 'dependencies': [
+ '../content/content.gyp:content_shell_apk',
+ '<@(android_app_targets)',
+ 'android_builder_tests',
+ '../android_webview/android_webview.gyp:android_webview_apk',
+ '../chrome/chrome.gyp:chromium_testshell',
+ ],
+ }, # target_name: All
+ {
+      # The current list of tests for android. This is temporary
+      # until the full set is supported. If adding a new test here,
+      # please also add it to build/android/run_tests.py, else the
+ # test is not run.
+ #
+ # WARNING:
+ # Do not add targets here without communicating the implications
+ # on tryserver triggers and load. Discuss with jrg please.
+ 'target_name': 'android_builder_tests',
+ 'type': 'none',
+ 'dependencies': [
+ '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests',
+ '../base/base.gyp:base_unittests',
+ '../cc/cc_tests.gyp:cc_unittests',
+ '../chrome/chrome.gyp:unit_tests',
+ '../content/content.gyp:content_shell_test_apk',
+ '../content/content.gyp:content_unittests',
+ '../gpu/gpu.gyp:gpu_unittests',
+ '../ipc/ipc.gyp:ipc_tests',
+ '../media/media.gyp:media_unittests',
+ '../net/net.gyp:net_unittests',
+ '../sql/sql.gyp:sql_unittests',
+ '../sync/sync.gyp:sync_unit_tests',
+ '../third_party/WebKit/Source/WebKit/chromium/All.gyp:*',
+ '../tools/android/device_stats_monitor/device_stats_monitor.gyp:device_stats_monitor',
+ '../tools/android/fake_dns/fake_dns.gyp:fake_dns',
+ '../tools/android/forwarder2/forwarder.gyp:forwarder2',
+ '../tools/android/md5sum/md5sum.gyp:md5sum',
+ '../ui/ui.gyp:ui_unittests',
+ # From here down: not added to run_tests.py yet.
+ '../jingle/jingle.gyp:jingle_unittests',
+ # Required by ui_unittests.
+ # TODO(wangxianzhu): It'd better let ui_unittests depend on it, but
+ # this would cause circular gyp dependency which needs refactoring the
+ # gyps to resolve.
+ '../chrome/chrome_resources.gyp:packed_resources',
+ ],
+ 'conditions': [
+ ['linux_breakpad==1', {
+ 'dependencies': [
+ '../breakpad/breakpad.gyp:breakpad_unittests',
+ ],
+ }],
+ ['"<(gtest_target_type)"=="shared_library"', {
+ 'dependencies': [
+ # The first item is simply the template. We add as a dep
+ # to make sure it builds in ungenerated form. TODO(jrg):
+ # once stable, transition to a test-only (optional)
+ # target.
+ '../testing/android/native_test.gyp:native_test_apk',
+ # Unit test bundles packaged as an apk.
+ '../base/base.gyp:base_unittests_apk',
+ '../cc/cc_tests.gyp:cc_unittests_apk',
+ '../chrome/chrome.gyp:unit_tests_apk',
+ '../content/content.gyp:content_unittests_apk',
+ '../gpu/gpu.gyp:gpu_unittests_apk',
+ '../ipc/ipc.gyp:ipc_tests_apk',
+ '../media/media.gyp:media_unittests_apk',
+ '../net/net.gyp:net_unittests_apk',
+ '../sql/sql.gyp:sql_unittests_apk',
+ '../sync/sync.gyp:sync_unit_tests_apk',
+ '../ui/ui.gyp:ui_unittests_apk',
+ '../android_webview/android_webview.gyp:android_webview_test_apk',
+ '../chrome/chrome.gyp:chromium_testshell_test_apk',
+ ],
+ }],
+ ],
+ },
+ {
+ # Experimental / in-progress targets that are expected to fail
+ # but we still try to compile them on bots (turning the stage
+ # orange, not red).
+ 'target_name': 'android_experimental',
+ 'type': 'none',
+ 'dependencies': [
+ ],
+ },
+ {
+ # In-progress targets that are expected to fail and are NOT run
+ # on any bot.
+ 'target_name': 'android_in_progress',
+ 'type': 'none',
+ 'dependencies': [
+ '../content/content.gyp:content_browsertests',
+ ],
+ },
+ ], # targets
+}
diff --git a/media/webrtc/trunk/build/android/adb_device_functions.sh b/media/webrtc/trunk/build/android/adb_device_functions.sh
new file mode 100755
index 000000000..66cc32fc4
--- /dev/null
+++ b/media/webrtc/trunk/build/android/adb_device_functions.sh
@@ -0,0 +1,139 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# A collection of functions useful for maintaining android devices
+
+
+# Run an adb command on all connected devices in parallel.
+# Usage: adb_all command line to eval. Quoting is optional.
+#
+# Examples:
+# adb_all install Chrome.apk
+# adb_all 'shell cat /path/to/file'
+#
+adb_all() {
+ if [[ $# == 0 ]]; then
+ echo "Usage: adb_all <adb command>. Quoting is optional."
+ echo "Example: adb_all install Chrome.apk"
+ return 1
+ fi
+ local DEVICES=$(adb_get_devices -b)
+ local NUM_DEVICES=$(echo $DEVICES | wc -w)
+ if (( $NUM_DEVICES > 1 )); then
+ echo "Looping over $NUM_DEVICES devices"
+ fi
+ _adb_multi "$DEVICES" "$*"
+}
+
+
+# Run a command on each connected device. Quoting the command is suggested but
+# not required. The script sets up the variable DEVICE to correspond to the
+# current serial number. Intended for complex one-liners that don't work in
+# adb_all.
+# Usage: adb_device_loop 'command line to eval'
+adb_device_loop() {
+ if [[ $# == 0 ]]; then
+ echo "Intended for more complex one-liners that cannot be done with" \
+ "adb_all."
+ echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \
+ 'adb shell cat /data/local.prop)"'
+ return 1
+ fi
+ local DEVICES=$(adb_get_devices)
+ if [[ -z $DEVICES ]]; then
+ return
+ fi
+ # Do not change DEVICE variable name - part of api
+ for DEVICE in $DEVICES; do
+ DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//')
+ echo "Running on $DEVICE ($DEV_TYPE)"
+ ANDROID_SERIAL=$DEVICE eval "$*"
+ done
+}
+
+# Erases data from any devices visible on adb. To preserve a device,
+# disconnect it or:
+# 1) Reboot it into fastboot with 'adb reboot bootloader'
+# 2) Run wipe_all_devices to wipe remaining devices
+# 3) Restore the device with 'fastboot reboot'
+#
+# Usage: wipe_all_devices [-f]
+#
+wipe_all_devices() {
+ if [[ -z $(which adb) || -z $(which fastboot) ]]; then
+ echo "aborting: adb and fastboot not in path"
+ return 1
+ elif ! $(groups | grep -q 'plugdev'); then
+ echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'"
+ fi
+
+ local DEVICES=$(adb_get_devices -b)
+
+ if [[ $1 != '-f' ]]; then
+ echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device."
+ read -p "Hit enter to continue"
+ fi
+
+ _adb_multi "$DEVICES" "reboot bootloader"
+ # Subshell to isolate job list
+ (
+ for DEVICE in $DEVICES; do
+ fastboot_erase $DEVICE &
+ done
+ wait
+ )
+
+ # Reboot devices together
+ for DEVICE in $DEVICES; do
+ fastboot -s $DEVICE reboot
+ done
+}
+
+# Wipe a device in fastboot.
+# Usage fastboot_erase [serial]
+fastboot_erase() {
+ if [[ -n $1 ]]; then
+ echo "Wiping $1"
+ local SERIAL="-s $1"
+ else
+ if [ -z $(fastboot devices) ]; then
+ echo "No devices in fastboot, aborting."
+ echo "Check out wipe_all_devices to see if sufficient"
+ echo "You can put a device in fastboot using adb reboot bootloader"
+ return 1
+ fi
+ local SERIAL=""
+ fi
+ fastboot $SERIAL erase cache
+ fastboot $SERIAL erase userdata
+}
+
+# Get list of devices connected via adb
+# Args: -b block until adb detects a device
+adb_get_devices() {
+ local DEVICES="$(adb devices | grep 'device$')"
+ if [[ -z $DEVICES && $1 == '-b' ]]; then
+ echo '- waiting for device -' >&2
+ local DEVICES="$(adb wait-for-device devices | grep 'device$')"
+ fi
+ echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/'
+}
+
+###################################################
+## HELPER FUNCTIONS
+###################################################
+
+# Run an adb command in parallel over a device list
+_adb_multi() {
+ local DEVICES=$1
+ local ADB_ARGS=$2
+ (
+ for DEVICE in $DEVICES; do
+ adb -s $DEVICE $ADB_ARGS &
+ done
+ wait
+ )
+}
diff --git a/media/webrtc/trunk/build/android/adb_kill_content_shell b/media/webrtc/trunk/build/android/adb_kill_content_shell
new file mode 100755
index 000000000..d24c7a995
--- /dev/null
+++ b/media/webrtc/trunk/build/android/adb_kill_content_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running content shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell')
+VAL=$(echo "$SHELL_PID_LINES" | wc -l)
+if [ $VAL -lt 1 ] ; then
+ echo "Not running Content shell."
+else
+ SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+ if [ "$SHELL_PID" != "" ] ; then
+ set -x
+ adb shell kill $SHELL_PID
+ set -
+ else
+ echo "Content shell does not appear to be running."
+ fi
+fi
diff --git a/media/webrtc/trunk/build/android/adb_logcat_monitor.py b/media/webrtc/trunk/build/android/adb_logcat_monitor.py
new file mode 100755
index 000000000..aeaef0b09
--- /dev/null
+++ b/media/webrtc/trunk/build/android/adb_logcat_monitor.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create. The
+script will run until killed by an external signal. To test, run the
+script in a shell and <Ctrl>-C it after a while. It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+ """Exception used to signal a timeout."""
+ pass
+
+
+class SigtermError(Exception):
+ """Exception used to catch a sigterm."""
+ pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+ """Spawns a adb logcat process if one is not currently running."""
+ process, logcat_num = devices[device_id]
+ if process:
+ if process.poll() is None:
+ # Logcat process is still happily running
+ return
+ else:
+ logging.info('Logcat for device %s has died', device_id)
+ error_filter = re.compile('- waiting for device -')
+ for line in process.stderr:
+ if not error_filter.match(line):
+ logging.error(device_id + ': ' + line)
+
+ logging.info('Starting logcat %d for device %s', logcat_num,
+ device_id)
+ logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+ logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+ process = subprocess.Popen([adb_cmd, '-s', device_id,
+ 'logcat', '-v', 'threadtime'],
+ stdout=logcat_file,
+ stderr=subprocess.PIPE)
+ devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+ """Gets the device list from adb.
+
+ We use an alarm in this function to avoid deadlocking from an external
+ dependency.
+
+ Args:
+ adb_cmd: binary to run adb
+
+ Returns:
+ list of devices or an empty list on timeout
+ """
+ signal.alarm(2)
+ try:
+ out, err = subprocess.Popen([adb_cmd, 'devices'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()
+ if err:
+ logging.warning('adb device error %s', err.strip())
+ return re.findall('^(\w+)\tdevice$', out, re.MULTILINE)
+ except TimeoutException:
+ logging.warning('"adb devices" command timed out')
+ return []
+ except (IOError, OSError):
+ logging.exception('Exception from "adb devices"')
+ return []
+ finally:
+ signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+ """Monitor adb forever. Expects a SIGINT (Ctrl-C) to kill."""
+ # We create the directory to ensure 'run once' semantics
+ if os.path.exists(base_dir):
+ print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
+ shutil.rmtree(base_dir, ignore_errors=True)
+
+ os.makedirs(base_dir)
+ logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+ level=logging.INFO,
+ format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+ # Set up the alarm for calling 'adb devices'. This is to ensure
+ # our script doesn't get stuck waiting for a process response
+ def TimeoutHandler(_, unused_frame):
+ raise TimeoutException()
+ signal.signal(signal.SIGALRM, TimeoutHandler)
+
+ # Handle SIGTERMs to ensure clean shutdown
+ def SigtermHandler(_, unused_frame):
+ raise SigtermError()
+ signal.signal(signal.SIGTERM, SigtermHandler)
+
+ logging.info('Started with pid %d', os.getpid())
+ pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+ try:
+ with open(pid_file_path, 'w') as f:
+ f.write(str(os.getpid()))
+ while True:
+ for device_id in GetAttachedDevices(adb_cmd):
+ if not device_id in devices:
+ devices[device_id] = (None, 0)
+
+ for device in devices:
+ # This will spawn logcat watchers for any device ever detected
+ StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+ time.sleep(5)
+ except SigtermError:
+ logging.info('Received SIGTERM, shutting down')
+ except:
+ logging.exception('Unexpected exception in main.')
+ finally:
+ for process, _ in devices.itervalues():
+ if process:
+ try:
+ process.terminate()
+ except OSError:
+ pass
+ os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+ if 2 <= len(sys.argv) <= 3:
+ print 'adb_logcat_monitor: Initializing'
+ sys.exit(main(*sys.argv[1:3]))
+
+ print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/media/webrtc/trunk/build/android/adb_logcat_printer.py b/media/webrtc/trunk/build/android/adb_logcat_printer.py
new file mode 100755
index 000000000..5194668ec
--- /dev/null
+++ b/media/webrtc/trunk/build/android/adb_logcat_printer.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to out, and will combine multiple
+logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+
+import cStringIO
+import logging
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+ """Splices together multiple logcats from the same device.
+
+ Args:
+ list_of_lists: list of pairs (filename, list of timestamped lines)
+ logger: handler to log events
+
+ Returns:
+ list of lines with duplicates removed
+ """
+ cur_device_log = ['']
+ for cur_file, cur_file_lines in list_of_lists:
+ # Ignore files with just the logcat header
+ if len(cur_file_lines) < 2:
+ continue
+ common_index = 0
+ # Skip this step if list just has empty string
+ if len(cur_device_log) > 1:
+ try:
+ line = cur_device_log[-1]
+ # Used to make sure we only splice on a timestamped line
+ if re.match('^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
+ common_index = cur_file_lines.index(line)
+ else:
+ logger.warning('splice error - no timestamp in "%s"?', line.strip())
+ except ValueError:
+ # The last line was valid but wasn't found in the next file
+ cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+ logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+ cur_device_log += ['*'*30 + ' %s' % cur_file]
+ cur_device_log.extend(cur_file_lines[common_index:])
+
+ return cur_device_log
+
+
+def FindLogFiles(base_dir):
+ """Search a directory for logcat files.
+
+ Args:
+ base_dir: directory to search
+
+ Returns:
+ Mapping of device_id to a sorted list of file paths for a given device
+ """
+ logcat_filter = re.compile('^logcat_(\w+)_(\d+)$')
+ # list of tuples (<device_id>, <seq num>, <full file path>)
+ filtered_list = []
+ for cur_file in os.listdir(base_dir):
+ matcher = logcat_filter.match(cur_file)
+ if matcher:
+ filtered_list += [(matcher.group(1), int(matcher.group(2)),
+ os.path.join(base_dir, cur_file))]
+ filtered_list.sort()
+ file_map = {}
+ for device_id, _, cur_file in filtered_list:
+ if not device_id in file_map:
+ file_map[device_id] = []
+
+ file_map[device_id] += [cur_file]
+ return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+ """Read log files, combine and format.
+
+ Args:
+ log_filenames: mapping of device_id to sorted list of file paths
+ logger: logger handle for logging events
+
+ Returns:
+ list of formatted device logs, one for each device.
+ """
+ device_logs = []
+
+ for device, device_files in log_filenames.iteritems():
+ logger.debug('%s: %s', device, str(device_files))
+ device_file_lines = []
+ for cur_file in device_files:
+ with open(cur_file) as f:
+ device_file_lines += [(cur_file, f.read().splitlines())]
+ combined_lines = CombineLogFiles(device_file_lines, logger)
+ # Prepend each line with a short unique ID so it's easy to see
+ # when the device changes. We don't use the start of the device
+ # ID because it can be the same among devices. Example lines:
+ # AB324: foo
+ # AB324: blah
+ device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
+ return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+ """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+ try:
+ monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+ with open(monitor_pid_path) as f:
+ monitor_pid = int(f.readline())
+
+ logger.info('Sending SIGTERM to %d', monitor_pid)
+ os.kill(monitor_pid, signal.SIGTERM)
+ i = 0
+ while True:
+ time.sleep(.2)
+ if not os.path.exists(monitor_pid_path):
+ return
+ if not os.path.exists('/proc/%d' % monitor_pid):
+ logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+ return
+ logger.info('Waiting for logcat process to terminate.')
+ i += 1
+ if i >= 10:
+ logger.warning('Monitor pid did not terminate. Continuing anyway.')
+ return
+
+ except (ValueError, IOError, OSError):
+ logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(base_dir, output_file):
+ log_stringio = cStringIO.StringIO()
+ logger = logging.getLogger('LogcatPrinter')
+ logger.setLevel(LOG_LEVEL)
+ sh = logging.StreamHandler(log_stringio)
+ sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+ ' %(message)s'))
+ logger.addHandler(sh)
+
+ try:
+ # Wait at least 5 seconds after base_dir is created before printing.
+ #
+ # The idea is that 'adb logcat > file' output consists of 2 phases:
+ # 1 Dump all the saved logs to the file
+ # 2 Stream log messages as they are generated
+ #
+ # We want to give enough time for phase 1 to complete. There's no
+ # good method to tell how long to wait, but it usually only takes a
+ # second. On most bots, this code path won't occur at all, since
+ # adb_logcat_monitor.py command will have spawned more than 5 seconds
+ # prior to called this shell script.
+ try:
+ sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+ except OSError:
+ sleep_time = 5
+ if sleep_time > 0:
+ logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+ time.sleep(sleep_time)
+
+ assert os.path.exists(base_dir), '%s does not exist' % base_dir
+ ShutdownLogcatMonitor(base_dir, logger)
+ separator = '\n' + '*' * 80 + '\n\n'
+ for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+ output_file.write(log)
+ output_file.write(separator)
+ with open(os.path.join(base_dir, 'eventlog')) as f:
+ output_file.write('\nLogcat Monitor Event Log\n')
+ output_file.write(f.read())
+ except:
+ logger.exception('Unexpected exception')
+
+ logger.info('Done.')
+ sh.flush()
+ output_file.write('\nLogcat Printer Event Log\n')
+ output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+ if len(sys.argv) == 1:
+ print 'Usage: %s <base_dir>' % sys.argv[0]
+ sys.exit(1)
+ sys.exit(main(sys.argv[1], sys.stdout))
diff --git a/media/webrtc/trunk/build/android/adb_run_content_shell b/media/webrtc/trunk/build/android/adb_run_content_shell
new file mode 100755
index 000000000..bfc7367b7
--- /dev/null
+++ b/media/webrtc/trunk/build/android/adb_run_content_shell
@@ -0,0 +1,14 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+ INTENT_ARGS='-d "$1"' # e.g. a URL
+fi
+
+adb shell am start \
+ -a android.intent.action.VIEW \
+ -n org.chromium.content_shell/.ContentShellActivity \
+ $INTENT_ARGS
diff --git a/media/webrtc/trunk/build/android/ant/chromium-jars.xml b/media/webrtc/trunk/build/android/ant/chromium-jars.xml
new file mode 100644
index 000000000..7007df5dc
--- /dev/null
+++ b/media/webrtc/trunk/build/android/ant/chromium-jars.xml
@@ -0,0 +1,97 @@
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+<project name="chromium-jars" default="dist">
+ <!--
+  Common ant build file for chromium_*.jars.
+ For creating a new chromium_*.jar :
+ 1. Use build/java.gypi action.
+ The jar will be created as chromium_${PACKAGE_NAME} in
+ ${PRODUCT_DIR}/lib.java.
+ -->
+ <description>
+ Building ${PROJECT_NAME}/ java source code with ant.
+ </description>
+
+ <import file="common.xml"/>
+
+ <path id="javac.custom.classpath">
+ <filelist files="${INPUT_JARS_PATHS}"/>
+ <pathelement location="${ANDROID_SDK}/android.jar"/>
+ </path>
+
+ <path id="javac.srcdirs.additional">
+ <filelist files="${ADDITIONAL_SRC_DIRS}"/>
+ <filelist files="${GENERATED_SRC_DIRS}"/>
+ </path>
+
+ <property-value
+ name="javac.srcdir"
+ value="src:${toString:javac.srcdirs.additional}"
+ />
+
+ <property-location
+ name="dest.dir"
+ location="${PRODUCT_DIR}/java/${PACKAGE_NAME}"
+ check-exists="false"
+ />
+
+ <target name="init">
+ <!-- Create the time stamp -->
+ <tstamp/>
+ <!-- Create the build directory structure used by compile -->
+ <mkdir dir="${dest.dir}"/>
+
+ <!-- Remove all .class files from dest.dir. This prevents inclusion of
+ incorrect .class files in the final .jar. For example, if a .java file
+ was deleted, the .jar should not contain the .class files for that
+ .java from previous builds.
+ -->
+ <delete>
+ <fileset dir="${dest.dir}" includes="**/*.class"/>
+ </delete>
+ </target>
+
+ <target name="compile" depends="init" description="Compiles source.">
+ <fail message="Error: javac.custom.classpath is not set. Please set it to
+ classpath for javac.">
+ <condition>
+ <not><isreference refid="javac.custom.classpath"/></not>
+ </condition>
+ </fail>
+
+ <echo>
+ Compiling ${javac.srcdir}, classpath: ${toString:javac.custom.classpath}
+ </echo>
+
+ <javac
+ srcdir="${javac.srcdir}"
+ destdir="${dest.dir}"
+ classpathref="javac.custom.classpath"
+ debug="true"
+ includeantruntime="false"
+ />
+ </target>
+
+ <target name="dist" depends="compile"
+ description="Generate chromium_${PACKAGE_NAME}.jar.">
+ <!-- Create the distribution directory -->
+ <jar
+ jarfile="${lib.java.dir}/chromium_${PACKAGE_NAME}.jar"
+ basedir="${dest.dir}"
+ />
+
+ <!-- If Gyp thinks this output is stale but Ant doesn't, the modification
+ time should still be updated. Otherwise, this target will continue to
+ be rebuilt in future builds.
+ -->
+ <touch file="${lib.java.dir}/chromium_${PACKAGE_NAME}.jar"/>
+ </target>
+
+ <target name="clean" description="clean up">
+ <!-- Delete the appropriate directory trees -->
+ <delete dir="${dest.dir}"/>
+ </target>
+</project>
diff --git a/media/webrtc/trunk/build/android/ant/common.xml b/media/webrtc/trunk/build/android/ant/common.xml
new file mode 100644
index 000000000..1001f19eb
--- /dev/null
+++ b/media/webrtc/trunk/build/android/ant/common.xml
@@ -0,0 +1,90 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+<project name="chrome_common_defines">
+ <!-- Common build properties for Chrome for android. -->
+
+ <!--
+ Macro for checking that a property is correctly set. Performs checks for:
+ 1. Property is set and not null.
+ 2. String value of property does not contains any '$' signs.
+ -->
+ <macrodef name="check-property-value">
+ <attribute name="property"/>
+ <sequential>
+ <fail message ="Property @{property} is not set.">
+ <condition>
+ <or>
+ <not><isset property="@{property}"/></not>
+ <length string="${@{property}}" trim="true" when="less" length="1"/>
+ </or>
+ </condition>
+ </fail>
+ <!--
+ Check for $ signs. This catches errors when properties are initialized from environment
+ variables. E.g. if we have <property name="foo" value="${env.bar}" /> but env.bar is
+ not set then foo will have the literal value of '${env.bar}'.
+ -->
+ <fail message="Value checked failed for property: @{property} : ${@{property}}.
+ Property value contains an uninitialized environment variable.">
+ <condition>
+ <contains string="${@{property}}" substring="$"/>
+ </condition>
+ </fail>
+ </sequential>
+ </macrodef>
+
+ <!--
+ A safe setter for location properties. Checks that a location is not
+ empty and actually exists. For specifying output directories, location
+ check can be disabled by specifying check-exists="false".
+ -->
+ <macrodef name="property-location">
+ <attribute name="name"/>
+ <attribute name="location"/>
+ <attribute name="check-exists" default="true"/>
+ <sequential>
+ <property name="@{name}" location="@{location}"/>
+ <check-property-value property="@{name}"/>
+ <fail message="Location specified for @{name} : @{location} does not exist.">
+ <condition>
+ <and>
+ <equals arg1="@{check-exists}" arg2="true"/>
+ <not><available file="@{location}"/></not>
+ </and>
+ </condition>
+ </fail>
+ </sequential>
+ </macrodef>
+
+ <!-- A safe setter for property values -->
+ <macrodef name="property-value">
+ <attribute name="name"/>
+ <attribute name="value"/>
+ <sequential>
+ <property name="@{name}" value="@{value}"/>
+ <check-property-value property="@{name}"/>
+ </sequential>
+ </macrodef>
+
+ <!-- Common environment properties. -->
+ <property-location name="sdk.dir" location="${ANDROID_SDK_ROOT}"/>
+ <property-value name="target" value="android-${ANDROID_SDK_VERSION}"/>
+ <property name="source.dir" location="src"/>
+ <property-location name="android.gdbserver" location="${ANDROID_GDBSERVER}"/>
+ <!--
+ Common directories used by SDK Build, when making changes here
+ make sure to update gyp files and test scripts constants in
+ build/android/pylib/constants.py
+ -->
+ <!-- Common directory for chromium_*.jars. -->
+ <property-location name="lib.java.dir" location="${PRODUCT_DIR}/lib.java"/>
+ <!-- Common directory for test jars. -->
+ <property-location name="test.lib.java.dir"
+ location="${PRODUCT_DIR}/test.lib.java"/>
+ <!-- Common directory for apks. -->
+ <property-location name="apks.dir" location="${PRODUCT_DIR}/apks"/>
+</project>
diff --git a/media/webrtc/trunk/build/android/ant/sdk-targets.xml b/media/webrtc/trunk/build/android/ant/sdk-targets.xml
new file mode 100644
index 000000000..b692f6ec6
--- /dev/null
+++ b/media/webrtc/trunk/build/android/ant/sdk-targets.xml
@@ -0,0 +1,284 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+<project name="chrome_sdk_overrides" >
+ <!--
+ Redefinition of targets used by SDK tools.
+ Supported version: SDK tools revision 20.
+
+ SDK tools do not allow easy way of extending classpaths
+ for aidl and javac. This file defines targets which can be used to
+ override targets used by tools.
+ -->
+ <target name="-pre-compile">
+ <!--
+ Remove all .class files from the output directory. This prevents inclusion of incorrect .class
+ files in the final apk. For example, if a .java file was deleted, the apk should not contain
+ the .class files for that .java from previous builds.
+ -->
+ <delete>
+ <fileset dir="${out.classes.absolute.dir}" includes="**/*.class"/>
+ </delete>
+ </target>
+
+ <!--
+ Override the -compile target.
+ This target requires 'javac.custom.classpath' to be set to reference
+ of classpath to be used for javac. Also accepts custom path for
+ sources: 'javac.custom.sourcepath'.
+ -->
+ <target
+ name="-compile"
+ depends="-build-setup, -pre-build, -code-gen, -pre-compile">
+ <do-only-if-manifest-hasCode elseText="hasCode = false. Skipping..." >
+ <!-- If javac.srcdirs.additional isn't set, set it to an empty path. -->
+ <if>
+ <condition>
+ <not>
+ <isreference refid="javac.srcdirs.additional"/>
+ </not>
+ </condition>
+ <then>
+ <path id="javac.srcdirs.additional"/>
+ </then>
+ </if>
+ <javac
+ bootclasspathref="project.target.class.path"
+ classpathref="javac.custom.classpath"
+ debug="true"
+ destdir="${out.classes.absolute.dir}"
+ encoding="${java.encoding}"
+ extdirs=""
+ fork="${need.javac.fork}"
+ includeantruntime="false"
+ source="${java.source}"
+ target="${java.target}"
+ verbose="${verbose}">
+ <src path="${source.absolute.dir}"/>
+ <src path="${gen.absolute.dir}"/>
+ <src>
+ <path refid="javac.srcdirs.additional"/>
+ </src>
+ <compilerarg line="${java.compilerargs}"/>
+ </javac>
+ <!--
+ If the project is instrumented, then instrument the classes
+ TODO(shashishekhar): Add option to override emma filter.
+ -->
+ <if condition="${build.is.instrumented}">
+ <then>
+ <echo level="info">
+ Instrumenting classes from ${out.absolute.dir}/classes...
+ </echo>
+ <!-- build the default filter to remove R, Manifest, BuildConfig -->
+ <getemmafilter
+ appPackage="${project.app.package}"
+ filterOut="emma.default.filter"
+ libraryPackagesRefId="project.library.packages"/>
+ <!--
+ Define where the .em file is output.
+ This may have been setup already if this is a library.
+ -->
+ <property name="emma.coverage.absolute.file"
+ location="${out.absolute.dir}/coverage.em"/>
+ <!-- It only instruments class files, not any external libs -->
+
+ <emma enabled="true">
+ <instr
+ instrpath="${out.absolute.dir}/classes"
+ metadatafile="${emma.coverage.absolute.file}"
+ mode="overwrite"
+ outdir="${out.absolute.dir}/classes"
+ verbosity="${verbosity}">
+ <filter excludes="${emma.default.filter}"/>
+ <filter value="${emma.filter}"/>
+ </instr>
+ </emma>
+ </then>
+ </if>
+ <!--
+ If the project needs a test jar then generate a jar containing
+ all compiled classes and referenced jars.
+ project.is.testapp is set by Android's ant build system based on the
+ target's manifest. It is true only for instrumentation apks.
+ -->
+ <if condition="${project.is.testapp}">
+ <then>
+ <echo level="info">Creating test jar file:
+ ${ant.project.name}-debug.jar</echo>
+ <property-location name="create.test.jar.file"
+ location="${CHROMIUM_SRC}/build/android/ant/create-test-jar.js"/>
+ <script language="javascript" src="${create.test.jar.file}"/>
+ </then>
+ </if>
+
+ </do-only-if-manifest-hasCode>
+ </target>
+
+ <!--
+ For debug builds, the Android SDK tools create a key in ~/.android and sign the build with it.
+ This has caused all kinds of issues. Instead, the debug build should be signed with a key in
+ build/android/ant. The SDK tools do not provide any support for overriding that behavior and so
+ instead one must use the hack below.
+ -->
+
+ <!-- Disables automatic signing. -->
+ <property name="build.is.signing.debug" value="false"/>
+
+ <!-- TODO(cjhopman): Remove this property when all gyp files define the CHROMIUM_SRC property. -->
+ <property name="CHROMIUM_SRC" value="${PRODUCT_DIR}/../.." />
+
+ <property name="key.store" value="${CHROMIUM_SRC}/build/android/ant/chromium-debug.keystore"/>
+ <property name="key.store.password" value="chromium"/>
+ <property name="key.alias" value="chromiumdebugkey"/>
+ <property name="key.alias.password" value="chromium"/>
+
+ <!-- SDK tools assume that out.packaged.file is signed and name it "...-unaligned" -->
+ <property name="out.packaged.file"
+ value="${apks.dir}/${ant.project.name}-debug-unsigned.apk" />
+ <property name="out.unaligned.file"
+ value="${apks.dir}/${ant.project.name}-debug-unaligned.apk" />
+
+ <!-- By default, the SDK tools build only aligns the APK in the -do-debug target. -->
+ <target name="-do-debug"
+ depends="-set-debug-mode, -debug-obfuscation-check, -package, -post-package">
+ <!-- only create apk if *not* a library project -->
+ <do-only-if-not-library elseText="Library project: do not create apk..." >
+ <sequential>
+ <!-- Signs the APK -->
+ <echo level="info">Signing final apk...</echo>
+ <signapk
+ input="${out.packaged.file}"
+ output="${out.unaligned.file}"
+ keystore="${key.store}"
+ storepass="${key.store.password}"
+ alias="${key.alias}"
+ keypass="${key.alias.password}"/>
+
+ <!-- Zip aligns the APK -->
+ <zipalign-helper
+ in.package="${out.unaligned.file}"
+ out.package="${out.final.file}" />
+ <echo level="info">Release Package: ${out.final.file}</echo>
+ </sequential>
+ </do-only-if-not-library>
+ <record-build-info />
+ </target>
+
+ <path id="native.libs.gdbserver">
+ <fileset file="${android.gdbserver}"/>
+ </path>
+
+ <target name="-post-compile">
+ <!--
+ Copy gdbserver to main libs directory if building a non-instrumentation debug apk.
+ TODO(jrg): For now, Chrome on Android always builds native code
+ as Release and java/ant as Debug, which means we always install
+ gdbserver. Resolve this discrepancy, possibly by making this
+ Release Official build java/ant as Release.
+ -->
+ <if>
+ <condition>
+ <and>
+ <equals arg1="${build.target}" arg2="debug"/>
+ <isfalse value="${project.is.testapp}"/>
+ </and>
+ </condition>
+ <then>
+ <echo message="Copying gdbserver to the apk to enable native debugging"/>
+ <copy todir="${out.dir}/libs/${target.abi}">
+ <path refid="native.libs.gdbserver"/>
+ </copy>
+ </then>
+ </if>
+
+ <!-- Package all the compiled .class files into a .jar. -->
+ <jar
+ jarfile="${lib.java.dir}/chromium_apk_${PACKAGE_NAME}.jar"
+ basedir="${out.classes.absolute.dir}"
+ />
+ </target>
+
+ <!--
+ Override obfuscate target to pass javac.custom.classpath to Proguard. SDK tools do not provide
+ any way to pass custom class paths to Proguard.
+ -->
+ <target name="-obfuscate">
+ <if condition="${proguard.enabled}">
+ <then>
+ <property name="obfuscate.absolute.dir" location="${out.absolute.dir}/proguard"/>
+ <property name="preobfuscate.jar.file" value="${obfuscate.absolute.dir}/original.jar"/>
+ <property name="obfuscated.jar.file" value="${obfuscate.absolute.dir}/obfuscated.jar"/>
+ <!-- input for dex will be proguard's output -->
+ <property name="out.dex.input.absolute.dir" value="${obfuscated.jar.file}"/>
+
+ <!-- Add Proguard Tasks -->
+ <property name="proguard.jar" location="${android.tools.dir}/proguard/lib/proguard.jar"/>
+ <taskdef name="proguard" classname="proguard.ant.ProGuardTask" classpath="${proguard.jar}"/>
+
+ <!-- Set the android classpath Path object into a single property. It'll be
+ all the jar files separated by a platform path-separator.
+ Each path must be quoted if it contains spaces.
+ -->
+ <pathconvert property="project.target.classpath.value" refid="project.target.class.path">
+ <firstmatchmapper>
+ <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+ <identitymapper/>
+ </firstmatchmapper>
+ </pathconvert>
+
+ <!-- Build a path object with all the jar files that must be obfuscated.
+ This include the project compiled source code and any 3rd party jar
+ files. -->
+ <path id="project.all.classes.path">
+ <pathelement location="${preobfuscate.jar.file}"/>
+ <path refid="project.all.jars.path"/>
+ <!-- Pass javac.custom.classpath for apks. -->
+ <path refid="javac.custom.classpath"/>
+ </path>
+ <!-- Set the project jar files Path object into a single property. It'll be
+ all the jar files separated by a platform path-separator.
+ Each path must be quoted if it contains spaces.
+ -->
+ <pathconvert property="project.all.classes.value" refid="project.all.classes.path">
+ <firstmatchmapper>
+ <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+ <identitymapper/>
+ </firstmatchmapper>
+ </pathconvert>
+
+ <!-- Turn the path property ${proguard.config} from an A:B:C property
+ into a series of includes: -include A -include B -include C
+ suitable for processing by the ProGuard task. Note - this does
+ not include the leading '-include "' or the closing '"'; those
+ are added under the <proguard> call below.
+ -->
+ <path id="proguard.configpath">
+ <pathelement path="${proguard.config}"/>
+ </path>
+ <pathconvert pathsep='" -include "' property="proguard.configcmd"
+ refid="proguard.configpath"/>
+
+ <mkdir dir="${obfuscate.absolute.dir}"/>
+ <delete file="${preobfuscate.jar.file}"/>
+ <delete file="${obfuscated.jar.file}"/>
+ <jar basedir="${out.classes.absolute.dir}"
+ destfile="${preobfuscate.jar.file}"/>
+ <proguard>
+ -include "${proguard.configcmd}"
+ -include "${out.absolute.dir}/proguard.txt"
+ -injars ${project.all.classes.value}
+ -outjars "${obfuscated.jar.file}"
+ -libraryjars ${project.target.classpath.value}
+ -dump "${obfuscate.absolute.dir}/dump.txt"
+ -printseeds "${obfuscate.absolute.dir}/seeds.txt"
+ -printusage "${obfuscate.absolute.dir}/usage.txt"
+ -printmapping "${obfuscate.absolute.dir}/mapping.txt"
+ </proguard>
+ </then>
+ </if>
+ </target>
+</project>
diff --git a/media/webrtc/trunk/build/android/buildbot_fyi_builder.sh b/media/webrtc/trunk/build/android/buildbot_fyi_builder.sh
new file mode 100755
index 000000000..627d41776
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_fyi_builder.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_fyi_builder.sh "$@"
diff --git a/media/webrtc/trunk/build/android/buildbot_fyi_tester.sh b/media/webrtc/trunk/build/android/buildbot_fyi_tester.sh
new file mode 100755
index 000000000..476e7f5e6
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_fyi_tester.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_fyi_tester.sh "$@"
diff --git a/media/webrtc/trunk/build/android/buildbot_main.sh b/media/webrtc/trunk/build/android/buildbot_main.sh
new file mode 100755
index 000000000..7d76c22cc
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_main.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_main_builder.sh "$@"
diff --git a/media/webrtc/trunk/build/android/buildbot_try_builder.sh b/media/webrtc/trunk/build/android/buildbot_try_builder.sh
new file mode 100755
index 000000000..170a42302
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_try_builder.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_try_builder.sh "$@"
diff --git a/media/webrtc/trunk/build/android/buildbot_try_compile.sh b/media/webrtc/trunk/build/android/buildbot_try_compile.sh
new file mode 100755
index 000000000..b3b60670b
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_try_compile.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_try_compile.sh "$@"
diff --git a/media/webrtc/trunk/build/android/buildbot_try_tester.sh b/media/webrtc/trunk/build/android/buildbot_try_tester.sh
new file mode 100755
index 000000000..1be48c499
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_try_tester.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_try_tester.sh "$@"
diff --git a/media/webrtc/trunk/build/android/buildbot_webkit_main.sh b/media/webrtc/trunk/build/android/buildbot_webkit_main.sh
new file mode 100755
index 000000000..50761d02a
--- /dev/null
+++ b/media/webrtc/trunk/build/android/buildbot_webkit_main.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Temporary placeholders to call new buildbot script locations until
+# buildbot master config can be pointed to new location.
+
+exec $(dirname $0)/buildbot/bb_webkit_latest_builder.sh "$@"
diff --git a/media/webrtc/trunk/build/android/cpufeatures.gypi b/media/webrtc/trunk/build/android/cpufeatures.gypi
new file mode 100644
index 000000000..72728a170
--- /dev/null
+++ b/media/webrtc/trunk/build/android/cpufeatures.gypi
@@ -0,0 +1,6 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#dummy gypi: contents should be discarded due to an enclosing 'conditions:' element.
+{}
diff --git a/media/webrtc/trunk/build/android/device_stats_monitor.py b/media/webrtc/trunk/build/android/device_stats_monitor.py
new file mode 100755
index 000000000..181c3db5f
--- /dev/null
+++ b/media/webrtc/trunk/build/android/device_stats_monitor.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides iotop/top style profiling for android.
+
+Usage:
+ ./device_stats_monitor.py --hz=20 --duration=5 --outfile=/tmp/foo
+"""
+
+import optparse
+import os
+import sys
+import time
+
+from pylib import android_commands
+from pylib import device_stats_monitor
+from pylib import test_options_parser
+
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ option_parser.add_option('--hz', type='int', default=20,
+ help='Number of samples/sec.')
+ option_parser.add_option('--duration', type='int', default=5,
+ help='Seconds to monitor.')
+ option_parser.add_option('--outfile', default='/tmp/devicestatsmonitor',
+ help='Location to start output file.')
+ test_options_parser.AddBuildTypeOption(option_parser)
+ options, args = option_parser.parse_args(argv)
+
+ monitor = device_stats_monitor.DeviceStatsMonitor(
+ android_commands.AndroidCommands(), options.hz, options.build_type)
+ monitor.Start()
+ print 'Waiting for %d seconds while profiling.' % options.duration
+ time.sleep(options.duration)
+ url = monitor.StopAndCollect(options.outfile)
+ print 'View results in browser at %s' % url
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/android/device_status_check.py b/media/webrtc/trunk/build/android/device_status_check.py
new file mode 100755
index 000000000..3d695a223
--- /dev/null
+++ b/media/webrtc/trunk/build/android/device_status_check.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class to keep track of devices across builds and report state."""
+import logging
+import optparse
+import os
+import smtplib
+import sys
+
+from pylib import buildbot_report
+from pylib.android_commands import GetAttachedDevices
+from pylib.cmd_helper import GetCmdOutput
+
+
+def DeviceInfo(serial):
+ """Gathers info on a device via various adb calls.
+
+ Args:
+ serial: The serial of the attached device to construct info about.
+
+ Returns:
+ Tuple of device type, build id and report as a string.
+ """
+
+ def AdbShellCmd(cmd):
+ return GetCmdOutput('adb -s %s shell %s' % (serial, cmd),
+ shell=True).strip()
+
+ device_type = AdbShellCmd('getprop ro.build.product')
+ device_build = AdbShellCmd('getprop ro.build.id')
+
+ report = ['Device %s (%s)' % (serial, device_type),
+ ' Build: %s (%s)' % (device_build,
+ AdbShellCmd('getprop ro.build.fingerprint')),
+ ' Battery: %s%%' % AdbShellCmd('dumpsys battery | grep level '
+ "| awk '{print $2}'"),
+ ' Battery temp: %s' % AdbShellCmd('dumpsys battery'
+ '| grep temp '
+ "| awk '{print $2}'"),
+ ' IMEI slice: %s' % AdbShellCmd('dumpsys iphonesubinfo '
+ '| grep Device'
+ "| awk '{print $4}'")[-6:],
+ ' Wifi IP: %s' % AdbShellCmd('getprop dhcp.wlan0.ipaddress'),
+ '']
+
+ return device_type, device_build, '\n'.join(report)
+
+
+def CheckForMissingDevices(options, adb_online_devs):
+ """Uses file of previous online devices to detect broken phones.
+
+ Args:
+ options: out_dir parameter of options argument is used as the base
+ directory to load and update the cache file.
+ adb_online_devs: A list of serial numbers of the currently visible
+ and online attached devices.
+ """
+ # TODO(navabi): remove this once the bug that causes different number
+ # of devices to be detected between calls is fixed.
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+
+ out_dir = os.path.abspath(options.out_dir)
+
+ def ReadDeviceList(file_name):
+ devices_path = os.path.join(out_dir, file_name)
+ devices = []
+ try:
+ with open(devices_path) as f:
+ devices = f.read().splitlines()
+ except IOError:
+ # Ignore error, file might not exist
+ pass
+ return devices
+
+ def WriteDeviceList(file_name, device_list):
+ path = os.path.join(out_dir, file_name)
+ if not os.path.exists(out_dir):
+ os.makedirs(out_dir)
+ with open(path, 'w') as f:
+ # Write devices currently visible plus devices previously seen.
+ f.write('\n'.join(set(device_list)))
+
+ last_devices_path = os.path.join(out_dir, '.last_devices')
+ last_devices = ReadDeviceList('.last_devices')
+
+ missing_devs = list(set(last_devices) - set(adb_online_devs))
+ if missing_devs:
+ from_address = 'buildbot@chromium.org'
+ to_address = 'chromium-android-device-alerts@google.com'
+ bot_name = os.environ['BUILDBOT_BUILDERNAME']
+ slave_name = os.environ['BUILDBOT_SLAVENAME']
+ num_online_devs = len(adb_online_devs)
+ subject = 'Devices offline on %s, %s (%d remaining).' % (slave_name,
+ bot_name,
+ num_online_devs)
+ buildbot_report.PrintWarning()
+ devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+ buildbot_report.PrintSummaryText(devices_missing_msg)
+
+ # TODO(navabi): Debug by printing both output from GetCmdOutput and
+ # GetAttachedDevices to compare results.
+ body = '\n'.join(
+ ['Current online devices: %s' % adb_online_devs,
+ '%s are no longer visible. Were they removed?\n' % missing_devs,
+ 'SHERIFF: See go/chrome_device_monitor',
+ 'Cache file: %s\n\n' % last_devices_path,
+ 'adb devices: %s' % GetCmdOutput(['adb', 'devices']),
+ 'adb devices(GetAttachedDevices): %s' % GetAttachedDevices()])
+
+ print body
+
+ # Only send email if the first time a particular device goes offline
+ last_missing = ReadDeviceList('.last_missing')
+ new_missing_devs = set(missing_devs) - set(last_missing)
+
+ if new_missing_devs:
+ msg_body = '\r\n'.join(
+ ['From: %s' % from_address,
+ 'To: %s' % to_address,
+ 'Subject: %s' % subject,
+ '', body])
+ try:
+ server = smtplib.SMTP('localhost')
+ server.sendmail(from_address, [to_address], msg_body)
+ server.quit()
+ except Exception as e:
+ print 'Failed to send alert email. Error: %s' % e
+ else:
+ new_devs = set(adb_online_devs) - set(last_devices)
+ if new_devs and os.path.exists(last_devices_path):
+ buildbot_report.PrintWarning()
+ buildbot_report.PrintSummaryText(
+ '%d new devices detected' % len(new_devs))
+ print ('New devices detected %s. And now back to your '
+ 'regularly scheduled program.' % list(new_devs))
+ WriteDeviceList('.last_devices', (adb_online_devs + last_devices))
+ WriteDeviceList('.last_missing', missing_devs)
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('', '--out-dir',
+ help='Directory where the device path is stored',
+ default=os.path.join(os.path.dirname(__file__), '..',
+ '..', 'out'))
+
+ options, args = parser.parse_args()
+ if args:
+ parser.error('Unknown options %s' % args)
+ buildbot_report.PrintNamedStep('Device Status Check')
+ devices = GetAttachedDevices()
+ types, builds, reports = [], [], []
+ if devices:
+ types, builds, reports = zip(*[DeviceInfo(dev) for dev in devices])
+
+ unique_types = list(set(types))
+ unique_builds = list(set(builds))
+
+ buildbot_report.PrintMsg('Online devices: %d. Device types %s, builds %s'
+ % (len(devices), unique_types, unique_builds))
+ print '\n'.join(reports)
+ CheckForMissingDevices(options, devices)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/android/emulator.py b/media/webrtc/trunk/build/android/emulator.py
new file mode 100755
index 000000000..77c9a75da
--- /dev/null
+++ b/media/webrtc/trunk/build/android/emulator.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to start and stop Android emulator.
+
+Assumes system environment ANDROID_NDK_ROOT has been set.
+
+ Emulator: The class provides the methods to launch/shutdown the emulator with
+ the android virtual device named 'avd_armeabi' .
+"""
+
+import logging
+import os
+import signal
+import subprocess
+import sys
+import time
+
+from pylib import android_commands
+from pylib import cmd_helper
+
+# adb_interface.py is under ../../third_party/android_testrunner/
+sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
+ '..', 'third_party', 'android_testrunner'))
+import adb_interface
+import errors
+import run_command
+
+class EmulatorLaunchException(Exception):
+ """Emulator failed to launch."""
+ pass
+
+def _KillAllEmulators():
+ """Kill all running emulators that look like ones we started.
+
+ There are odd 'sticky' cases where there can be no emulator process
+ running but a device slot is taken. A little bot trouble and and
+ we're out of room forever.
+ """
+ emulators = android_commands.GetEmulators()
+ if not emulators:
+ return
+ for emu_name in emulators:
+ cmd_helper.GetCmdOutput(['adb', '-s', emu_name, 'emu', 'kill'])
+ logging.info('Emulator killing is async; give a few seconds for all to die.')
+ for i in range(5):
+ if not android_commands.GetEmulators():
+ return
+ time.sleep(1)
+
+
+def DeleteAllTempAVDs():
+ """Delete all temporary AVDs which are created for tests.
+
+ If the test exits abnormally and some temporary AVDs created when testing may
+ be left in the system. Clean these AVDs.
+ """
+ avds = android_commands.GetAVDs()
+ if not avds:
+ return
+ for avd_name in avds:
+ if 'run_tests_avd' in avd_name:
+ cmd = ['android', '-s', 'delete', 'avd', '--name', avd_name]
+ cmd_helper.GetCmdOutput(cmd)
+ logging.info('Delete AVD %s' % avd_name)
+
+
+class PortPool(object):
+ """Pool for emulator port starting position that changes over time."""
+ _port_min = 5554
+ _port_max = 5585
+ _port_current_index = 0
+
+ @classmethod
+ def port_range(cls):
+ """Return a range of valid ports for emulator use.
+
+ The port must be an even number between 5554 and 5584. Sometimes
+ a killed emulator "hangs on" to a port long enough to prevent
+ relaunch. This is especially true on slow machines (like a bot).
+ Cycling through a port start position helps make us resilient."""
+ ports = range(cls._port_min, cls._port_max, 2)
+ n = cls._port_current_index
+ cls._port_current_index = (n + 1) % len(ports)
+ return ports[n:] + ports[:n]
+
+
+def _GetAvailablePort():
+ """Returns an available TCP port for the console."""
+ used_ports = []
+ emulators = android_commands.GetEmulators()
+ for emulator in emulators:
+ used_ports.append(emulator.split('-')[1])
+ for port in PortPool.port_range():
+ if str(port) not in used_ports:
+ return port
+
+
+class Emulator(object):
+ """Provides the methods to lanuch/shutdown the emulator.
+
+ The emulator has the android virtual device named 'avd_armeabi'.
+
+ The emulator could use any even TCP port between 5554 and 5584 for the
+ console communication, and this port will be part of the device name like
+ 'emulator-5554'. Assume it is always True, as the device name is the id of
+ emulator managed in this class.
+
+ Attributes:
+ emulator: Path of Android's emulator tool.
+ popen: Popen object of the running emulator process.
+ device: Device name of this emulator.
+ """
+
+ # Signals we listen for to kill the emulator on
+ _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+ # Time to wait for an emulator launch, in seconds. This includes
+ # the time to launch the emulator and a wait-for-device command.
+ _LAUNCH_TIMEOUT = 120
+
+ # Timeout interval of wait-for-device command before bouncing to a a
+ # process life check.
+ _WAITFORDEVICE_TIMEOUT = 5
+
+ # Time to wait for a "wait for boot complete" (property set on device).
+ _WAITFORBOOT_TIMEOUT = 300
+
+ def __init__(self, new_avd_name, fast_and_loose):
+ """Init an Emulator.
+
+ Args:
+ nwe_avd_name: If set, will create a new temporary AVD.
+ fast_and_loose: Loosen up the rules for reliable running for speed.
+ Intended for quick testing or re-testing.
+
+ """
+ try:
+ android_sdk_root = os.environ['ANDROID_SDK_ROOT']
+ except KeyError:
+ logging.critical('The ANDROID_SDK_ROOT must be set to run the test on '
+ 'emulator.')
+ raise
+ self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+ self.android = os.path.join(android_sdk_root, 'tools', 'android')
+ self.popen = None
+ self.device = None
+ self.default_avd = True
+ self.fast_and_loose = fast_and_loose
+ self.abi = 'armeabi-v7a'
+ self.avd = 'avd_armeabi'
+ if 'x86' in os.environ.get('TARGET_PRODUCT', ''):
+ self.abi = 'x86'
+ self.avd = 'avd_x86'
+ if new_avd_name:
+ self.default_avd = False
+ self.avd = self._CreateAVD(new_avd_name)
+
+ def _DeviceName(self):
+ """Return our device name."""
+ port = _GetAvailablePort()
+ return ('emulator-%d' % port, port)
+
+ def _CreateAVD(self, avd_name):
+ """Creates an AVD with the given name.
+
+ Return avd_name.
+ """
+ avd_command = [
+ self.android,
+ '--silent',
+ 'create', 'avd',
+ '--name', avd_name,
+ '--abi', self.abi,
+ '--target', 'android-16',
+ '-c', '128M',
+ '--force',
+ ]
+ avd_process = subprocess.Popen(args=avd_command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ avd_process.stdin.write('no\n')
+ avd_process.wait()
+ logging.info('Create AVD command: %s', ' '.join(avd_command))
+ return avd_name
+
+ def _DeleteAVD(self):
+ """Delete the AVD of this emulator."""
+ avd_command = [
+ self.android,
+ '--silent',
+ 'delete',
+ 'avd',
+ '--name', self.avd,
+ ]
+ avd_process = subprocess.Popen(args=avd_command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ logging.info('Delete AVD command: %s', ' '.join(avd_command))
+ avd_process.wait()
+
+ def Launch(self, kill_all_emulators):
+ """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the
+ emulator is ready for use.
+
+ If fails, an exception will be raised.
+ """
+ if kill_all_emulators:
+ _KillAllEmulators() # just to be sure
+ if not self.fast_and_loose:
+ self._AggressiveImageCleanup()
+ (self.device, port) = self._DeviceName()
+ emulator_command = [
+ self.emulator,
+ # Speed up emulator launch by 40%. Really.
+ '-no-boot-anim',
+ # The default /data size is 64M.
+ # That's not enough for 8 unit test bundles and their data.
+ '-partition-size', '512',
+ # Enable GPU by default.
+ '-gpu', 'on',
+ # Use a familiar name and port.
+ '-avd', self.avd,
+ '-port', str(port)]
+ if not self.fast_and_loose:
+ emulator_command.extend([
+ # Wipe the data. We've seen cases where an emulator
+ # gets 'stuck' if we don't do this (every thousand runs or
+ # so).
+ '-wipe-data',
+ ])
+ logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+ self.popen = subprocess.Popen(args=emulator_command,
+ stderr=subprocess.STDOUT)
+ self._InstallKillHandler()
+
+ def _AggressiveImageCleanup(self):
+ """Aggressive cleanup of emulator images.
+
+ Experimentally it looks like our current emulator use on the bot
+ leaves image files around in /tmp/android-$USER. If a "random"
+ name gets reused, we choke with a 'File exists' error.
+ TODO(jrg): is there a less hacky way to accomplish the same goal?
+ """
+ logging.info('Aggressive Image Cleanup')
+ emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+ if not os.path.exists(emulator_imagedir):
+ return
+ for image in os.listdir(emulator_imagedir):
+ full_name = os.path.join(emulator_imagedir, image)
+ if 'emulator' in full_name:
+ logging.info('Deleting emulator image %s', full_name)
+ os.unlink(full_name)
+
+ def ConfirmLaunch(self, wait_for_boot=False):
+ """Confirm the emulator launched properly.
+
+ Loop on a wait-for-device with a very small timeout. On each
+ timeout, check the emulator process is still alive.
+ After confirming a wait-for-device can be successful, make sure
+ it returns the right answer.
+ """
+ seconds_waited = 0
+ number_of_waits = 2 # Make sure we can wfd twice
+ adb_cmd = "adb -s %s %s" % (self.device, 'wait-for-device')
+ while seconds_waited < self._LAUNCH_TIMEOUT:
+ try:
+ run_command.RunCommand(adb_cmd,
+ timeout_time=self._WAITFORDEVICE_TIMEOUT,
+ retry_count=1)
+ number_of_waits -= 1
+ if not number_of_waits:
+ break
+ except errors.WaitForResponseTimedOutError as e:
+ seconds_waited += self._WAITFORDEVICE_TIMEOUT
+ adb_cmd = "adb -s %s %s" % (self.device, 'kill-server')
+ run_command.RunCommand(adb_cmd)
+ self.popen.poll()
+ if self.popen.returncode != None:
+ raise EmulatorLaunchException('EMULATOR DIED')
+ if seconds_waited >= self._LAUNCH_TIMEOUT:
+ raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+ logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+ if wait_for_boot:
+ # Now that we checked for obvious problems, wait for a boot complete.
+ # Waiting for the package manager is sometimes problematic.
+ a = android_commands.AndroidCommands(self.device)
+ a.WaitForSystemBootCompleted(self._WAITFORBOOT_TIMEOUT)
+
+ def Shutdown(self):
+ """Shuts down the process started by launch."""
+ if not self.default_avd:
+ self._DeleteAVD()
+ if self.popen:
+ self.popen.poll()
+ if self.popen.returncode == None:
+ self.popen.kill()
+ self.popen = None
+
+ def _ShutdownOnSignal(self, signum, frame):
+ logging.critical('emulator _ShutdownOnSignal')
+ for sig in self._SIGNALS:
+ signal.signal(sig, signal.SIG_DFL)
+ self.Shutdown()
+ raise KeyboardInterrupt # print a stack
+
+ def _InstallKillHandler(self):
+ """Install a handler to kill the emulator when we exit unexpectedly."""
+ for sig in self._SIGNALS:
+ signal.signal(sig, self._ShutdownOnSignal)
+
+def main(argv):
+ Emulator(None, True).Launch(True)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/media/webrtc/trunk/build/android/enable_asserts.py b/media/webrtc/trunk/build/android/enable_asserts.py
new file mode 100755
index 000000000..5659e9e2a
--- /dev/null
+++ b/media/webrtc/trunk/build/android/enable_asserts.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Enables dalvik vm asserts in the android device."""
+
+from pylib import android_commands
+import optparse
+import sys
+
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ option_parser.add_option('--enable_asserts', dest='set_asserts',
+ action='store_true', default=None,
+ help='Sets the dalvik.vm.enableassertions property to "all"')
+ option_parser.add_option('--disable_asserts', dest='set_asserts',
+ action='store_false', default=None,
+ help='Removes the dalvik.vm.enableassertions property')
+ options, _ = option_parser.parse_args(argv)
+
+ commands = android_commands.AndroidCommands()
+ if options.set_asserts != None:
+ if commands.SetJavaAssertsEnabled(options.set_asserts):
+ commands.Reboot(full_reboot=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/media/webrtc/trunk/build/android/envsetup.sh b/media/webrtc/trunk/build/android/envsetup.sh
new file mode 100755
index 000000000..f94ca65b7
--- /dev/null
+++ b/media/webrtc/trunk/build/android/envsetup.sh
@@ -0,0 +1,133 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up environment for building Chromium on Android. It can either be
+# compiled with the Android tree or using the Android SDK/NDK. To build with
+# NDK/SDK: ". build/android/envsetup.sh --sdk". Environment variable
+# ANDROID_SDK_BUILD=1 will then be defined and used in the rest of the setup to
+# specify the build type.
+
+# When building WebView as part of Android we can't use the SDK. Other builds
+# default to using the SDK.
+# NOTE(yfriedman): This looks unnecessary but downstream the default value
+# should be 0 until all builds switch to SDK/NDK.
+if [[ "${CHROME_ANDROID_BUILD_WEBVIEW}" -eq 1 ]]; then
+ export ANDROID_SDK_BUILD=0
+else
+ export ANDROID_SDK_BUILD=1
+fi
+# Loop over args in case we add more arguments in the future.
+while [ "$1" != "" ]; do
+ case $1 in
+ -s | --sdk ) export ANDROID_SDK_BUILD=1 ; shift ;;
+ * ) shift ; break ;;
+ esac
+done
+
+if [[ "${ANDROID_SDK_BUILD}" -eq 1 ]]; then
+ echo "Using SDK build"
+fi
+
+host_os=$(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')
+
+case "${host_os}" in
+ "linux")
+ toolchain_dir="linux-x86_64"
+ ;;
+ "mac")
+ toolchain_dir="darwin-x86"
+ ;;
+ *)
+ echo "Host platform ${host_os} is not supported" >& 2
+ return 1
+esac
+
+CURRENT_DIR="$(readlink -f "$(dirname $BASH_SOURCE)/../../")"
+if [[ -z "${CHROME_SRC}" ]]; then
+ # If $CHROME_SRC was not set, assume current directory is CHROME_SRC.
+ export CHROME_SRC="${CURRENT_DIR}"
+fi
+
+if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
+ # If current directory is not in $CHROME_SRC, it might be set for other
+ # source tree. If $CHROME_SRC was set correctly and we are in the correct
+ # directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "".
+ # Otherwise, it will equal to "${CURRENT_DIR}"
+ echo "Warning: Current directory is out of CHROME_SRC, it may not be \
+the one you want."
+ echo "${CHROME_SRC}"
+fi
+
+# Android sdk platform version to use
+export ANDROID_SDK_VERSION=16
+
+# Source functions script. The file is in the same directory as this script.
+. "$(dirname $BASH_SOURCE)"/envsetup_functions.sh
+
+if [[ "${ANDROID_SDK_BUILD}" -eq 1 ]]; then
+ sdk_build_init
+# Sets up environment for building Chromium for Android with source. Expects
+# android environment setup and lunch.
+elif [[ -z "$ANDROID_BUILD_TOP" || \
+ -z "$ANDROID_TOOLCHAIN" || \
+ -z "$ANDROID_PRODUCT_OUT" ]]; then
+ echo "Android build environment variables must be set."
+ echo "Please cd to the root of your Android tree and do: "
+ echo " . build/envsetup.sh"
+ echo " lunch"
+ echo "Then try this again."
+ echo "Or did you mean NDK/SDK build. Run envsetup.sh with --sdk argument."
+ return 1
+elif [[ -n "$CHROME_ANDROID_BUILD_WEBVIEW" ]]; then
+ webview_build_init
+else
+ non_sdk_build_init
+fi
+
+# Workaround for valgrind build
+if [[ -n "$CHROME_ANDROID_VALGRIND_BUILD" ]]; then
+# arm_thumb=0 is a workaround for https://bugs.kde.org/show_bug.cgi?id=270709
+ DEFINES+=" arm_thumb=0 release_extra_cflags='-fno-inline\
+ -fno-omit-frame-pointer -fno-builtin' release_valgrind_build=1\
+ release_optimize=1"
+fi
+
+# Source a bunch of helper functions
+. ${CHROME_SRC}/build/android/adb_device_functions.sh
+
+ANDROID_GOMA_WRAPPER=""
+if [[ -d $GOMA_DIR ]]; then
+ ANDROID_GOMA_WRAPPER="$GOMA_DIR/gomacc"
+fi
+export ANDROID_GOMA_WRAPPER
+
+# Declare that Android builds are cross compiles.
+export GYP_CROSSCOMPILE=1
+
+export CXX_target="${ANDROID_GOMA_WRAPPER} \
+ $(echo -n ${ANDROID_TOOLCHAIN}/*-g++)"
+
+# Performs a gyp_chromium run to convert gyp->Makefile for android code.
+android_gyp() {
+ echo "GYP_GENERATORS set to '$GYP_GENERATORS'"
+ # http://crbug.com/143889.
+ # In case we are doing a Clang build, we have to unset CC_target and
+ # CXX_target. Otherwise GYP ends up generating a gcc build (although we set
+ # 'clang' to 1). This behavior was introduced by
+ # 54d2f6fe6d8a7b9d9786bd1f8540df6b4f46b83f in GYP.
+ (
+ # Fork to avoid side effects on the user's environment variables.
+ if echo "$GYP_DEFINES" | grep -qE '(clang|asan)'; then
+ if echo "$CXX_target" | grep -q g++; then
+ unset CXX_target
+ fi
+ fi
+ "${CHROME_SRC}/build/gyp_chromium" --depth="${CHROME_SRC}" --check "$@"
+ )
+}
+
+# FLOCK needs to be null on system that has no flock
+which flock > /dev/null || export FLOCK=
diff --git a/media/webrtc/trunk/build/android/envsetup_functions.sh b/media/webrtc/trunk/build/android/envsetup_functions.sh
new file mode 100755
index 000000000..da8a4ee57
--- /dev/null
+++ b/media/webrtc/trunk/build/android/envsetup_functions.sh
@@ -0,0 +1,334 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines functions for envsetup.sh which sets up environment for building
+# Chromium on Android. The build can either use the Android NDK/SDK or the
+# Android source tree. Each has a unique init function which calls functions
+# prefixed with "common_" that are common to both environment setups.
+
+################################################################################
+# Check to make sure the toolchain exists for the NDK version.
+################################################################################
+common_check_toolchain() {
+ if [[ ! -d "${ANDROID_TOOLCHAIN}" ]]; then
+ echo "Can not find Android toolchain in ${ANDROID_TOOLCHAIN}." >& 2
+ echo "The NDK version might be wrong." >& 2
+ return 1
+ fi
+}
+
+################################################################################
+# Exports environment variables common to both sdk and non-sdk build (e.g. PATH)
+# based on CHROME_SRC and ANDROID_TOOLCHAIN, along with DEFINES for GYP_DEFINES.
+################################################################################
+common_vars_defines() {
+
+ # Set toolchain path according to product architecture.
+ toolchain_arch="arm-linux-androideabi"
+ if [[ "${TARGET_PRODUCT}" =~ .*x86.* ]]; then
+ toolchain_arch="x86"
+ fi
+
+ toolchain_version="4.6"
+ toolchain_target=$(basename \
+ ${ANDROID_NDK_ROOT}/toolchains/${toolchain_arch}-${toolchain_version})
+ toolchain_path="${ANDROID_NDK_ROOT}/toolchains/${toolchain_target}"\
+"/prebuilt/${toolchain_dir}/bin/"
+
+ # Set only if not already set.
+ # Don't override ANDROID_TOOLCHAIN if set by Android configuration env.
+ export ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN:-${toolchain_path}}
+
+ common_check_toolchain
+
+ # Add Android SDK/NDK tools to system path.
+ export PATH=$PATH:${ANDROID_NDK_ROOT}
+ export PATH=$PATH:${ANDROID_SDK_ROOT}/tools
+ export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+
+ # This must be set before ANDROID_TOOLCHAIN, so that clang could find the
+ # gold linker.
+  # TODO(michaelbai): Remove this path once the gold linker becomes the default
+ # linker.
+ export PATH=$PATH:${CHROME_SRC}/build/android/${toolchain_arch}-gold
+
+ # Must have tools like arm-linux-androideabi-gcc on the path for ninja
+ export PATH=$PATH:${ANDROID_TOOLCHAIN}
+
+ # Add Chromium Android development scripts to system path.
+ # Must be after CHROME_SRC is set.
+ export PATH=$PATH:${CHROME_SRC}/build/android
+
+ # TODO(beverloo): Remove these once all consumers updated to --strip-binary.
+ export OBJCOPY=$(echo ${ANDROID_TOOLCHAIN}/*-objcopy)
+ export STRIP=$(echo ${ANDROID_TOOLCHAIN}/*-strip)
+
+ # The set of GYP_DEFINES to pass to gyp. Use 'readlink -e' on directories
+ # to canonicalize them (remove double '/', remove trailing '/', etc).
+ DEFINES="OS=android"
+ DEFINES+=" host_os=${host_os}"
+
+ if [[ -n "$CHROME_ANDROID_OFFICIAL_BUILD" ]]; then
+ DEFINES+=" branding=Chrome"
+ DEFINES+=" buildtype=Official"
+
+ # These defines are used by various chrome build scripts to tag the binary's
+ # version string as 'official' in linux builds (e.g. in
+ # chrome/trunk/src/chrome/tools/build/version.py).
+ export OFFICIAL_BUILD=1
+ export CHROMIUM_BUILD="_google_chrome"
+ export CHROME_BUILD_TYPE="_official"
+
+ # Used by chrome_version_info_posix.cc to display the channel name.
+ # Valid values: "unstable", "stable", "dev", "beta".
+ export CHROME_VERSION_EXTRA="beta"
+ fi
+
+ # The order file specifies the order of symbols in the .text section of the
+ # shared library, libchromeview.so. The file is an order list of section
+ # names and the library is linked with option
+ # --section-ordering-file=<orderfile>. The order file is updated by profiling
+ # startup after compiling with the order_profiling=1 GYP_DEFINES flag.
+ ORDER_DEFINES="order_text_section=${CHROME_SRC}/orderfiles/orderfile.out"
+
+ # The following defines will affect ARM code generation of both C/C++ compiler
+ # and V8 mksnapshot.
+ case "${TARGET_PRODUCT}" in
+ "passion"|"soju"|"sojua"|"sojus"|"yakju"|"mysid"|"nakasi")
+ DEFINES+=" arm_neon=1 armv7=1 arm_thumb=1"
+ DEFINES+=" ${ORDER_DEFINES}"
+ TARGET_ARCH="arm"
+ ;;
+ "trygon"|"tervigon")
+ DEFINES+=" arm_neon=0 armv7=1 arm_thumb=1 arm_fpu=vfpv3-d16"
+ DEFINES+=" ${ORDER_DEFINES}"
+ TARGET_ARCH="arm"
+ ;;
+ "full")
+ DEFINES+=" arm_neon=0 armv7=0 arm_thumb=1 arm_fpu=vfp"
+ TARGET_ARCH="arm"
+ ;;
+ *x86*)
+ # TODO(tedbo): The ia32 build fails on ffmpeg, so we disable it here.
+ DEFINES+=" use_libffmpeg=0"
+
+ host_arch=$(uname -m | sed -e \
+ 's/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/i86pc/ia32/')
+ DEFINES+=" host_arch=${host_arch}"
+ TARGET_ARCH="x86"
+ ;;
+ *)
+ echo "TARGET_PRODUCT: ${TARGET_PRODUCT} is not supported." >& 2
+ return 1
+ esac
+
+ case "${TARGET_ARCH}" in
+ "arm")
+ DEFINES+=" target_arch=arm"
+ ;;
+ "x86")
+ DEFINES+=" target_arch=ia32"
+ ;;
+ *)
+ echo "TARGET_ARCH: ${TARGET_ARCH} is not supported." >& 2
+ return 1
+ esac
+
+ DEFINES+=" android_gdbserver=${ANDROID_NDK_ROOT}/prebuilt/\
+android-${TARGET_ARCH}/gdbserver/gdbserver"
+}
+
+
+################################################################################
+# Exports common GYP variables based on variable DEFINES and CHROME_SRC.
+################################################################################
+common_gyp_vars() {
+ export GYP_DEFINES="${DEFINES}"
+
+ # Set GYP_GENERATORS to make-android if it's currently unset or null.
+ export GYP_GENERATORS="${GYP_GENERATORS:-make-android}"
+
+ # Use our All target as the default
+ export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} default_target=All"
+
+ # We want to use our version of "all" targets.
+ export CHROMIUM_GYP_FILE="${CHROME_SRC}/build/all_android.gyp"
+}
+
+
+################################################################################
+# Initializes environment variables for NDK/SDK build. Only Android NDK Revision
+# 7 on Linux or Mac is officially supported. To run this script, the system
+# environment ANDROID_NDK_ROOT must be set to Android NDK's root path. The
+# ANDROID_SDK_ROOT only needs to be set to override the default SDK which is in
+# the tree under $ROOT/src/third_party/android_tools/sdk.
+# TODO(navabi): Add NDK to $ROOT/src/third_party/android_tools/ndk.
+# To build Chromium for Android with NDK/SDK follow the steps below:
+# > export ANDROID_NDK_ROOT=<android ndk root>
+# > export ANDROID_SDK_ROOT=<android sdk root> # to override the default sdk
+# > . build/android/envsetup.sh --sdk
+# > make
+################################################################################
+sdk_build_init() {
+ # If ANDROID_NDK_ROOT is set when envsetup is run, use the ndk pointed to by
+ # the environment variable. Otherwise, use the default ndk from the tree.
+ if [[ -z "${ANDROID_NDK_ROOT}" || ! -d "${ANDROID_NDK_ROOT}" ]]; then
+ export ANDROID_NDK_ROOT="${CHROME_SRC}/third_party/android_tools/ndk/"
+ fi
+
+ # If ANDROID_SDK_ROOT is set when envsetup is run, and if it has the
+ # right SDK-compatible directory layout, use the sdk pointed to by the
+ # environment variable. Otherwise, use the default sdk from the tree.
+ local sdk_suffix=platforms/android-${ANDROID_SDK_VERSION}
+ if [[ -z "${ANDROID_SDK_ROOT}" || \
+ ! -d "${ANDROID_SDK_ROOT}/${sdk_suffix}" ]]; then
+ export ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
+ fi
+
+ # Makes sure ANDROID_BUILD_TOP is unset if build has option --sdk
+ unset ANDROID_BUILD_TOP
+
+ # Set default target.
+ export TARGET_PRODUCT="${TARGET_PRODUCT:-trygon}"
+
+ # Unset toolchain so that it can be set based on TARGET_PRODUCT.
+ # This makes it easy to switch between architectures.
+ unset ANDROID_TOOLCHAIN
+
+ common_vars_defines
+
+ DEFINES+=" sdk_build=1"
+ # If we are building NDK/SDK, and in the upstream (open source) tree,
+ # define a special variable for bringup purposes.
+ case "${ANDROID_BUILD_TOP-undefined}" in
+ "undefined")
+ DEFINES+=" android_upstream_bringup=1"
+ ;;
+ esac
+
+ # Sets android specific directories to NOT_SDK_COMPLIANT. This will allow
+ # android_gyp to generate make files, but will cause errors when (and only
+ # when) building targets that depend on these directories.
+ DEFINES+=" android_src='NOT_SDK_COMPLIANT'"
+ DEFINES+=" android_product_out=${CHROME_SRC}/out/android"
+ DEFINES+=" android_lib='NOT_SDK_COMPLIANT'"
+ DEFINES+=" android_static_lib='NOT_SDK_COMPLIANT'"
+ DEFINES+=" android_sdk=${ANDROID_SDK_ROOT}/${sdk_suffix}"
+ DEFINES+=" android_sdk_root=${ANDROID_SDK_ROOT}"
+ DEFINES+=" android_sdk_tools=${ANDROID_SDK_ROOT}/platform-tools"
+ DEFINES+=" android_sdk_version=${ANDROID_SDK_VERSION}"
+ DEFINES+=" android_toolchain=${ANDROID_TOOLCHAIN}"
+
+ common_gyp_vars
+
+ if [[ -n "$CHROME_ANDROID_BUILD_WEBVIEW" ]]; then
+    # Cannot build WebView with NDK/SDK because it needs the Android build
+    # system and must be built inside an Android source tree.
+ echo "Can not build WebView with NDK/SDK. Requires android source tree." \
+ >& 2
+ echo "Try . build/android/envsetup.sh instead." >& 2
+ return 1
+ fi
+
+}
+
+################################################################################
+# Initializes environment variables for build with android source. This expects
+# android environment to be set up along with lunch. To build:
+# > . build/envsetup.sh
+# > lunch <lunch-type>
+# > . build/android/envsetup.sh
+# > make
+#############################################################################
+non_sdk_build_init() {
+ # We export "TOP" here so that "mmm" can be run to build Java code without
+ # having to cd to $ANDROID_BUILD_TOP.
+ export TOP="$ANDROID_BUILD_TOP"
+
+ # Set "ANDROID_NDK_ROOT" as checked-in version, if it was not set.
+ if [[ "${ANDROID_NDK_ROOT}" || ! -d "$ANDROID_NDK_ROOT" ]] ; then
+ export ANDROID_NDK_ROOT="${CHROME_SRC}/third_party/android_tools/ndk/"
+ fi
+ if [[ ! -d "${ANDROID_NDK_ROOT}" ]] ; then
+ echo "Can not find Android NDK root ${ANDROID_NDK_ROOT}." >& 2
+ return 1
+ fi
+
+ # We export "ANDROID_SDK_ROOT" for building Java source with the SDK.
+ export ANDROID_SDK_ROOT=${ANDROID_BUILD_TOP}/prebuilts/sdk/\
+${ANDROID_SDK_VERSION}
+ # Needed by android antfiles when creating apks.
+ export ANDROID_SDK_HOME=${ANDROID_SDK_ROOT}
+
+ # Unset ANDROID_TOOLCHAIN, so it could be set to checked-in 64-bit toolchain.
+ # in common_vars_defines
+ unset ANDROID_TOOLCHAIN
+
+ common_vars_defines
+
+ DEFINES+=" sdk_build=0"
+ DEFINES+=" android_product_out=${ANDROID_PRODUCT_OUT}"
+
+ if [[ -n "$CHROME_ANDROID_BUILD_WEBVIEW" ]]; then
+ webview_build_init
+ return
+ fi
+
+ # The non-SDK build currently requires the SDK path to build the framework
+ # Java aidl files. TODO(steveblock): Investigate avoiding this requirement.
+ DEFINES+=" android_sdk=${ANDROID_SDK_ROOT}"
+ DEFINES+=" android_sdk_root=${ANDROID_SDK_ROOT}"
+ DEFINES+=" android_sdk_tools=${ANDROID_SDK_ROOT}/../tools/linux"
+ DEFINES+=" android_sdk_version=${ANDROID_SDK_VERSION}"
+ DEFINES+=" android_toolchain=${ANDROID_TOOLCHAIN}"
+
+ common_gyp_vars
+}
+
+################################################################################
+# To build WebView, we use the Android build system and build inside an Android
+# source tree. This method is called from non_sdk_build_init() and adds to the
+# settings specified there.
+#############################################################################
+webview_build_init() {
+ # For the WebView build we always use the NDK and SDK in the Android tree,
+ # and we don't touch ANDROID_TOOLCHAIN which is already set by Android.
+ export ANDROID_NDK_ROOT=${ANDROID_BUILD_TOP}/prebuilts/ndk/8
+ export ANDROID_SDK_ROOT=${ANDROID_BUILD_TOP}/prebuilts/sdk/\
+${ANDROID_SDK_VERSION}
+
+ common_vars_defines
+
+ # We need to supply SDK paths relative to the top of the Android tree to make
+ # sure the generated Android makefiles are portable, as they will be checked
+ # into the Android tree.
+ ANDROID_SDK=$(python -c \
+ "import os.path; print os.path.relpath('${ANDROID_SDK_ROOT}', \
+ '${ANDROID_BUILD_TOP}')")
+ ANDROID_SDK_TOOLS=$(python -c \
+ "import os.path; \
+ print os.path.relpath('${ANDROID_SDK_ROOT}/../tools/linux', \
+ '${ANDROID_BUILD_TOP}')")
+ DEFINES+=" android_build_type=1"
+ DEFINES+=" sdk_build=0"
+ DEFINES+=" android_src=\$(GYP_ABS_ANDROID_TOP_DIR)"
+ DEFINES+=" android_product_out=NOT_USED_ON_WEBVIEW"
+ DEFINES+=" android_upstream_bringup=1"
+ DEFINES+=" android_sdk=\$(GYP_ABS_ANDROID_TOP_DIR)/${ANDROID_SDK}"
+ DEFINES+=" android_sdk_root=\$(GYP_ABS_ANDROID_TOP_DIR)/${ANDROID_SDK}"
+ DEFINES+=" android_sdk_tools=\$(GYP_ABS_ANDROID_TOP_DIR)/${ANDROID_SDK_TOOLS}"
+ DEFINES+=" android_sdk_version=${ANDROID_SDK_VERSION}"
+ DEFINES+=" android_toolchain=${ANDROID_TOOLCHAIN}"
+ export GYP_DEFINES="${DEFINES}"
+
+ export GYP_GENERATORS="android"
+
+ export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} default_target=All"
+ export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} limit_to_target_all=1"
+ export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} auto_regeneration=0"
+
+ export CHROMIUM_GYP_FILE="${CHROME_SRC}/android_webview/all_webview.gyp"
+}
diff --git a/media/webrtc/trunk/build/android/gdb_apk b/media/webrtc/trunk/build/android/gdb_apk
new file mode 100755
index 000000000..7e657d6b4
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gdb_apk
@@ -0,0 +1,171 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach gdb to a running android application. Similar to ndk-gdb.
+# Run with --annotate=3 if running under emacs (M-x gdb).
+#
+# By default it is used to debug content shell; to debug other
+# pieces, the '-p' and '-l' options are needed.
+# For *unittests_apk (like base_unittests_apk), run with:
+# "gdb_apk -p org.chromium.native_test -l out/Release/lib.target -r"
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# args: command to run
+# Prints the command's stdout on stdout
+# Returns the command's status
+# Note: the command's stderr is lost
+adb_shell () {
+ local TMPOUT="$(mktemp)"
+ local LASTLINE RET
+ local ADB=${ADB:-adb}
+
+ # The weird sed rule is to strip the final \r on each output line
+ # Since 'adb shell' never returns the command's proper exit/status code,
+ # we force it to print it as '%%<status>' in the temporary output file,
+ # which we will later strip from it.
+ $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+ # Get last line in log, which contains the exit code from the command
+ LASTLINE=$(sed -e '$!d' $TMPOUT)
+ # Extract the status code from the end of the line, which must be '%%<code>'
+ RET=$(echo "$LASTLINE" | awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+ # Remove the status code from the last line. Note that this may result in an empty line
+ LASTLINE=$(echo "$LASTLINE" | awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+ # The output itself: all lines except the status code
+ sed -e '$d' $TMPOUT && echo -n "$LASTLINE"
+ # Remove temp file
+ rm -f $TMPOUT
+ # Exit with the appropriate status
+ return $RET
+}
+
+adb=$(which adb)
+if [[ "$adb" = "" ]] ; then
+ echo "Need adb in your path"
+ exit 1
+fi
+
+usage() {
+ echo "usage: ${0##*/} [-p package_name] [-l shared_lib_dir] [-g gdb] [-r]"
+ echo "-p package_name the android APK package to be debugged"
+ echo "-l shared_lib_dir directory containes native shared library"
+ echo "-g gdb_args agruments for gdb, eg: -g '-n -write'"
+ echo "-r the target device is rooted"
+}
+
+process_options() {
+ local OPTNAME OPTIND OPTERR OPTARG
+ while getopts ":p:l:g:r" OPTNAME; do
+ case "$OPTNAME" in
+ p)
+ package_name="$OPTARG"
+ ;;
+ l)
+ shared_lib_dir="$OPTARG"
+ ;;
+ g)
+ gdb_args="$OPTARG"
+ ;;
+ r)
+ rooted_phone=1
+ ;;
+ \:)
+ echo "'-$OPTARG' needs an argument."
+ usage
+ exit 1
+ ;;
+ *)
+ echo "invalid command line option: $OPTARG"
+ usage
+ exit 1
+ ;;
+ esac
+ done
+
+ if [ $# -ge ${OPTIND} ]; then
+ eval echo "Unexpected command line argument: \${${OPTIND}}"
+ usage
+ exit 1
+ fi
+}
+
+rooted_phone=0
+
+root=$(dirname $0)/../..
+package_name=org.chromium.content_shell
+shared_lib_dir=$root/out/${BUILDTYPE:-Debug}/lib.target
+gdb_args=''
+
+#process options
+process_options "$@"
+echo "Debug package $package_name"
+echo "Assume native shared library is under $shared_lib_dir"
+
+data_dir=/data/data/$package_name
+gdb_server_on_device=$data_dir/lib/gdbserver
+
+# Kill any running gdbserver
+pid=$(adb shell ps | awk '/gdbserver/ {print $2}')
+if [[ "$pid" != "" ]] ; then
+ if [[ $rooted_phone -eq 1 ]] ; then
+ adb shell kill $pid
+ else
+ adb shell run-as $package_name kill $pid
+ fi
+fi
+
+pid=$(adb_shell ps | awk "/$package_name$/ {print \$2}")
+if [[ "$pid" = "" ]] ; then
+ echo "No $package_name running?"
+ echo "Try this: adb shell am start -a android.intent.action.VIEW " \
+ "-n $package_name/.SomethingActivity (Something might be ContentShell)"
+ exit 2
+fi
+
+no_gdb_server=$(adb shell ls $gdb_server_on_device | grep 'No such file')
+if [[ "$no_gdb_server" != "" ]] ; then
+ echo "No gdb server on device at $gdb_server_on_device"
+ echo "Please install a debug build."
+ exit 3
+fi
+
+if [[ $rooted_phone -eq 1 ]] ; then
+ adb shell $gdb_server_on_device :4321 --attach $pid &
+ adb forward tcp:4321 tcp:4321
+else
+ adb shell run-as $package_name lib/gdbserver +debug-socket --attach $pid &
+ adb forward tcp:4321 localfilesystem:$data_dir/debug-socket
+fi
+sleep 2
+
+# Pull app_process and C libraries from device if needed
+app_process=${shared_lib_dir}/app_process
+if [[ ! -f ${app_process} ]] ; then
+ adb pull /system/bin/app_process ${app_process}
+ adb pull /system/lib/libc.so ${shared_lib_dir}
+fi
+
+# gdb commands
+cmdfile=$(mktemp /tmp/gdb_android_XXXXXXXX)
+cat >$cmdfile<<EOF
+# set solib-absolute-prefix null
+set solib-search-path ${shared_lib_dir}
+file ${app_process}
+target remote :4321
+EOF
+
+gdb=$(echo $ANDROID_TOOLCHAIN/../../linux-x86/bin/*gdb)
+if [[ ! -f ${gdb} ]] ; then
+ echo "Wow no gdb in env var ANDROID_TOOLCHAIN which is $ANDROID_TOOLCHAIN"
+ exit 4
+else
+ echo Using $gdb
+fi
+
+# ${gdb} -x $cmdfile $* $app_process
+${gdb} -x $cmdfile $gdb_args
+rm $cmdfile
diff --git a/media/webrtc/trunk/build/android/gdb_content_shell b/media/webrtc/trunk/build/android/gdb_content_shell
new file mode 100755
index 000000000..ee55c933b
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gdb_content_shell
@@ -0,0 +1,15 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach gdb to a running content shell. Redirect to the shell gdb_apk
+
+ROOT=$(cd "$(dirname $0)"; pwd)
+
+if [ $# -gt 0 ]; then
+ exec ${ROOT}/gdb_apk -r -g "$*"
+else
+ exec ${ROOT}/gdb_apk -r -p org.chromium.content_shell
+fi
diff --git a/media/webrtc/trunk/build/android/gtest_filter/base_unittests_disabled b/media/webrtc/trunk/build/android/gtest_filter/base_unittests_disabled
new file mode 100644
index 000000000..72530cb8a
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/base_unittests_disabled
@@ -0,0 +1,29 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+ProcessUtilTest.GetAppOutputRestrictedSIGPIPE
+# TODO(jrg): Fails on bots. Works locally. Figure out why. 2/6/12
+FieldTrialTest.*
+# TODO(zhenghao): Fail from build 6102 r123270. http://crbug.com/115612
+StackContainer.BufferAlignment
+# Failed on bot since it was first introduced.
+FileUtilProxyTest.Touch
+# Failing because FD remapping not supported in multiprocess_test_android.cc yet.
+ProcessUtilTest.FDRemapping
+# Flaky?
+ScopedJavaRefTest.RefCounts
+# Death tests are not supported with apks.
+*DeathTest*
+
diff --git a/media/webrtc/trunk/build/android/gtest_filter/base_unittests_emulator_additional_disabled b/media/webrtc/trunk/build/android/gtest_filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 000000000..85e8fd636
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from the emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/media/webrtc/trunk/build/android/gtest_filter/content_unittests_disabled b/media/webrtc/trunk/build/android/gtest_filter/content_unittests_disabled
new file mode 100644
index 000000000..6ec30f1e7
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/content_unittests_disabled
@@ -0,0 +1,21 @@
+# List of suppressions
+
+AudioRendererHostTest.CreateAndClose
+AudioRendererHostTest.CreateAndShutdown
+AudioRendererHostTest.CreatePlayAndClose
+AudioRendererHostTest.CreatePlayPauseAndClose
+AudioRendererHostTest.SetVolume
+AudioRendererHostTest.CreatePlayAndShutdown
+AudioRendererHostTest.CreatePlayPauseAndShutdown
+AudioRendererHostTest.SimulateError
+AudioRendererHostTest.SimulateErrorAndClose
+# crbug.com/104950
+DeviceOrientationProviderTest.ObserverNotRemoved
+DeviceOrientationProviderTest.StartFailing
+# crbug.com/138930
+SkCanvasVideoRendererTest.*
+# crbug.com/139095
+RenderWidgetTest.OnMsgPaintAtSize
+# crbug.com/147549
+GamepadProviderTest.PollingAccess
+PepperGamepadHostTest.WaitForReply
diff --git a/media/webrtc/trunk/build/android/gtest_filter/ipc_tests_disabled b/media/webrtc/trunk/build/android/gtest_filter/ipc_tests_disabled
new file mode 100644
index 000000000..fd2485af1
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/ipc_tests_disabled
@@ -0,0 +1,14 @@
+# Times out
+IPCSyncChannelTest.ChattyServer
+
+# MultiProcessTest related failures. These tests fail if DCHECK is enabled.
+IPCChannelPosixTest.AdvancedConnected
+IPCChannelPosixTest.ResetState
+IPCChannelPosixTest.MultiConnection
+IPCFuzzingTest.SanityTest
+IPCFuzzingTest.MsgBadPayloadArgs
+IPCChannelTest.DescriptorTest
+IPCChannelTest.ChannelTest
+IPCChannelTest.ChannelProxyTest
+IPCChannelTest.SendMessageInChannelConnected
+SyncSocketTest.SanityTest
diff --git a/media/webrtc/trunk/build/android/gtest_filter/media_unittests_disabled b/media/webrtc/trunk/build/android/gtest_filter/media_unittests_disabled
new file mode 100644
index 000000000..96d106b63
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/media_unittests_disabled
@@ -0,0 +1,11 @@
+# List of suppressions
+
+# crbug.com/136720
+CrossProcessNotificationMultiProcessTest.Basic
+
+# Death tests are not supported on APK
+# http://crbug.com/138855
+CompositeFilterDeathTest.*
+
+# http://crbug.com/138833
+AesDecryptorTest.*
diff --git a/media/webrtc/trunk/build/android/gtest_filter/net_unittests_disabled b/media/webrtc/trunk/build/android/gtest_filter/net_unittests_disabled
new file mode 100644
index 000000000..327586463
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/net_unittests_disabled
@@ -0,0 +1,124 @@
+# List of suppressions.
+# Generated by hand to get net_unittests running initially.
+# Likely too aggressive disabling.
+DiskCacheBackendTest.*
+TransportSecurityStateTest.ParseSidePins*
+# TODO(jnd): The following tests are disabled because test server spawner is not
+# enabled on Chromium Android. Once the net upstreaming is done, test server
+# spawner can be enabled and those tests should pass.
+# BUG=135165
+X509CertificateTest.*
+X509CertificateParseTest.*
+HttpNetworkTransactionSpdy2Test.SOCKS4_HTTP_GET
+HttpNetworkTransactionSpdy2Test.SOCKS4_SSL_GET
+HttpNetworkTransactionSpdy21Test.SOCKS4_HTTP_GET
+HttpNetworkTransactionSpdy21Test.SOCKS4_SSL_GET
+HttpNetworkTransactionSpdy3Test.SOCKS4_HTTP_GET
+HttpNetworkTransactionSpdy3Test.SOCKS4_SSL_GET
+HttpNetworkTransactionTest.SOCKS4_HTTP_GET
+HttpNetworkTransactionTest.SOCKS4_SSL_GET
+HttpNetworkTransactionTest.UploadUnreadableFile
+HttpNetworkTransactionTest.UnreadableUploadFileAfterAuthRestart
+ProxyResolverJSBindingsTest.MyIpAddress
+ProxyScriptFetcherImplTest.*
+SOCKSClientSocketTest.*
+SSLClientSocketTest.*
+PythonUtils.PythonRunTime
+URLRequestTestHTTP.*
+URLRequestTestFTP.*
+HTTPSRequestTest.HTTPSMismatchedTest
+HTTPSRequestTest.HTTPSExpiredTest
+HTTPSRequestTest.HTTPSPreloadedHSTSTest
+HTTPSRequestTest.ClientAuthTest
+URLRequestTest.DelayedCookieCallback
+URLRequestTest.DoNotSendCookies
+URLRequestTest.DoNotSaveCookies
+URLRequestTest.DoNotSendCookies_ViaPolicy
+URLRequestTest.DoNotSaveCookies_ViaPolicy
+URLRequestTest.DoNotSaveEmptyCookies
+URLRequestTest.DoNotSendCookies_ViaPolicy_Async
+URLRequestTest.DoNotSaveCookies_ViaPolicy_Async
+URLRequestTest.DoNotOverrideReferrer
+WebSocketJobSpdy2Test.ThrottlingWebSocket
+WebSocketJobSpdy2Test.ThrottlingWebSocketSpdyEnabled
+WebSocketJobSpdy2Test.ThrottlingSpdy
+WebSocketJobSpdy2Test.ThrottlingSpdySpdyEnabled
+WebSocketJobSpdy3Test.ThrottlingWebSocket
+WebSocketJobSpdy3Test.ThrottlingWebSocketSpdyEnabled
+WebSocketJobSpdy3Test.ThrottlingSpdy
+WebSocketJobSpdy3Test.ThrottlingSpdySpdyEnabled
+WebSocketJobTest.ThrottlingWebSocket
+WebSocketJobTest.ThrottlingWebSocketSpdyEnabled
+WebSocketJobTest.ThrottlingSpdy
+WebSocketJobTest.ThrottlingSpdySpdyEnabled
+X509CertificateWeakDigestTest.*
+*/X509CertificateWeakDigestTest.*
+TransportSecurityStateTest.BogusPinsHeaders
+TransportSecurityStateTest.ValidPinsHeadersSHA1
+TransportSecurityStateTest.ValidPinsHeadersSHA256
+HTTPSRequestTest.ResumeTest
+HTTPSRequestTest.SSLSessionCacheShardTest
+HTTPSRequestTest.HTTPSErrorsNoClobberTSSTest
+HttpNetworkTransactionSpdy3Test.UploadUnreadableFile
+HttpNetworkTransactionSpdy2Test.UploadUnreadableFile
+HTTPSRequestTest.SSLv3Fallback
+HTTPSEVCRLSetTest.FreshCRLSet
+HTTPSCRLSetTest.ExpiredCRLSet
+URLFetcherTest.SameThreadsTest
+URLFetcherTest.DifferentThreadsTest
+URLFetcherTest.CancelAll
+URLFetcherPostTest.Basic
+URLFetcherUploadProgressTest.Basic
+URLFetcherDownloadProgressTest.Basic
+URLFetcherDownloadProgressCancelTest.CancelWhileProgressReport
+URLFetcherHeadersTest.Headers
+URLFetcherSocketAddressTest.SocketAddress
+URLFetcherStopOnRedirectTest.StopOnRedirect
+URLFetcherProtectTest.Overload
+URLFetcherProtectTest.ServerUnavailable
+URLFetcherProtectTestPassedThrough.ServerUnavailablePropagateResponse
+URLFetcherBadHTTPSTest.BadHTTPSTest
+URLFetcherCancelTest.ReleasesContext
+URLFetcherCancelTest.CancelWhileDelayedStartTaskPending
+URLFetcherMultipleAttemptTest.SameData
+URLFetcherFileTest.SmallGet
+URLFetcherFileTest.LargeGet
+URLFetcherFileTest.CantakeOfFile
+URLFetcherFileTest.OverwriteExistingFile
+URLFetcherFileTest.TryToOverwriteDirectory
+URLFetcherFileTest.SmallGetToTempFile
+URLFetcherFileTest.LargeGetToTempFile
+URLFetcherFileTest.CantakeOfTempFile
+URLFetcherEmptyPostTest.Basic
+# TODO(jrg): Fails on bots. Works locally. Figure out why. 2/6/12
+HttpCache.TypicalGET_ConditionalRequest
+HttpCache.RangeGET_OK
+HttpCache.RangeGET_SyncOK
+HttpCache.RangeGET_Revalidate2
+HttpCache.RangeGET_Previous200
+HttpCache.RangeGET_Cancel2
+# crbug.com/139144
+UDPSocketTest.Broadcast
+
+UDPSocketTest.ConnectRandomBind
+UDPSocketTest.ClientGetLocalPeerAddresses
+# Disabled 4/13/12 by jrg. More CertVerifyProcTest.* tests fail on
+# the bot than locally; disabling them all for now.
+CertVerifyProcTest.*
+# Runs fine locally but not on the bot.
+VerifyRoot/CertVerifyProcWeakDigestTest.*
+# Relies on TestServer which isn't yet configured upstream.
+URLRequestContextBuilderTest.*
+
+# All cert_verify_proc_unittest.cc failing since JNI was enabled.
+VerifyIntermediate/CertVerifyProcWeakDigestTest.*
+VerifyEndEntity/CertVerifyProcWeakDigestTest.*
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.*
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.*
+VerifyMixed/CertVerifyProcWeakDigestTest.*
+
+# Death tests are not supported with apks.
+*DeathTest*
+# These are death tests and thus also disabled.
+PrioritizedDispatcherTest.CancelNull
+PrioritizedDispatcherTest.CancelMissing
diff --git a/media/webrtc/trunk/build/android/gtest_filter/sync_unit_tests_disabled b/media/webrtc/trunk/build/android/gtest_filter/sync_unit_tests_disabled
new file mode 100644
index 000000000..cc4b72d13
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/sync_unit_tests_disabled
@@ -0,0 +1,4 @@
+SyncHttpBridgeTest.*
+
+# crbug.com/144422
+OnDiskSyncableDirectory.FailInitialWrite
diff --git a/media/webrtc/trunk/build/android/gtest_filter/ui_unittests_disabled b/media/webrtc/trunk/build/android/gtest_filter/ui_unittests_disabled
new file mode 100644
index 000000000..793cc9b28
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/ui_unittests_disabled
@@ -0,0 +1,44 @@
+# List of suppressions
+# This file was automatically generated by build/android/run_tests.py
+BytesFormattingTest.FormatBytes
+CanvasTest.StringSizeEmptyString
+CanvasTest.StringWidth
+ClipboardTest.RTFTest
+FontListTest.FontDescString_FromFont
+FontListTest.FontDescString_FromFontVector
+FontListTest.FontDescString_FromFontWithNonNormalStyle
+FontListTest.Fonts_DeriveFontList
+FontListTest.Fonts_DeriveFontListWithSize
+FontListTest.Fonts_DescStringWithStyleInFlexibleFormat_RoundTrip
+FontListTest.Fonts_FontVector_RoundTrip
+FontListTest.Fonts_FromDescString
+FontListTest.Fonts_FromDescStringInFlexibleFormat
+FontListTest.Fonts_FromDescStringWithStyleInFlexibleFormat
+FontListTest.Fonts_FromFont
+FontListTest.Fonts_FromFontVector
+FontListTest.Fonts_FromFontWithNonNormalStyle
+FontListTest.Fonts_GetStyle
+FontTest.Ascent
+FontTest.AvgCharWidth
+FontTest.AvgWidths
+FontTest.Height
+FontTest.LoadArial
+FontTest.LoadArialBold
+FontTest.Widths
+L10nUtilTest.GetDisplayNameForCountry
+L10nUtilTest.GetDisplayNameForLocale
+ResourceBundle.DelegateGetFont
+TextEliderTest.ElideEmail
+TextEliderTest.ElideEmailMoreSpace
+TextEliderTest.ElideRectangleText
+TextEliderTest.ElideRectangleTextLongWords
+TextEliderTest.ElideRectangleTextPunctuation
+TextEliderTest.ElideTextLongStrings
+TextEliderTest.ElideTextSurrogatePairs
+TextEliderTest.ElideTextTruncate
+TextEliderTest.TestFileURLEliding
+TextEliderTest.TestFilenameEliding
+TextEliderTest.TestGeneralEliding
+TextEliderTest.TestMoreEliding
+TextEliderTest.TestTrailingEllipsisSlashEllipsisHack
+TreeNodeIteratorPruneTest.Test
diff --git a/media/webrtc/trunk/build/android/gtest_filter/unit_tests_disabled b/media/webrtc/trunk/build/android/gtest_filter/unit_tests_disabled
new file mode 100644
index 000000000..d9337f27c
--- /dev/null
+++ b/media/webrtc/trunk/build/android/gtest_filter/unit_tests_disabled
@@ -0,0 +1,149 @@
+# List of suppressions
+
+# crbug.com/139429
+BrowserMainTest.WarmConnectionFieldTrial_Invalid
+BrowserMainTest.WarmConnectionFieldTrial_Random
+BrowserMainTest.WarmConnectionFieldTrial_WarmestSocket
+
+# crbug.com/139431
+ChromePaths.UserCacheDir
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# http://crbug.com/117940
+TemplateURLTest.*
+TemplateURLPrepopulateDataTest.*
+TemplateURLServiceSyncTest.*
+SearchHostToURLsMapTest.*
+
+# crbug.com/139427
+TemplateURLFetcherTest.*
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# DEATH test, forking and doing bad stuff, not supported yet.
+IncognitoModePrefsDeathTest.GetAvailabilityBadValue
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+WebsiteSettingsTest.OnSiteDataAccessed
+# Tests failing in the APK (mostly failing to create files out of the Application dir).
+DiagnosticsModelTest.RunAll
+FirstRunTest.RemoveSentinel
+GoogleUpdateTest.StatsConsent
+URLFixerUpperTest.FixupFile
+URLFixerUpperTest.FixupRelativeFile
+DownloadPathReservationTrackerTest.CreateDefaultDownloadPath
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# http://crbug.com/139165
+KeywordEditorControllerTest.*
+
+# Extension support is limited on Android.
+# Some of these can be enabled if we register extension related prefs in
+# browser_prefs.cc
+ExtensionTest.*
+ExtensionAPI.*
+ExtensionFileUtil.*
+ExtensionPermissionsTest.*
+ExtensionUnpackerTest.*
+ActiveTabTest.*
+ExtensionAppsPromo.*
+ComponentLoaderTest.*
+ExtensionFromUserScript.*
+ExtensionFromWebApp.*
+ExtensionIconManagerTest.*
+ExtensionServiceTest.*
+ExtensionServiceTestSimple.*
+ExtensionSourcePriorityTest.*
+ExtensionSpecialStoragePolicyTest.*
+ExternalPolicyProviderTest.*
+MenuManagerTest.*
+PageActionControllerTest.*
+PermissionsUpdaterTest.*
+ImageLoadingTrackerTest.*
+ScriptBadgeControllerTest.*
+ExtensionSettingsFrontendTest.*
+ExtensionSettingsSyncTest.*
+ExtensionUpdaterTest.*
+UserScriptListenerTest.*
+WebApplicationTest.GetShortcutInfoForTab
+ExtensionActionIconFactoryTest.*
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+# Test server and forwarder and not ready yet.
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+ConnectionTesterTest.*
+HttpPipeliningCompatibilityClientTest.*
+NetworkStatsTestTCP.*
+ConnectionTesterTest.RunAllTests
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/138275
+PrerenderTest.*
+RenderWidgetTest.OnMsgPaintAtSize
+
+# crbug.com/146857
+TopSitesTest.Blacklisting
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/144227
+ExtensionIconImageTest.*
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
diff --git a/media/webrtc/trunk/build/android/lighttpd_server.py b/media/webrtc/trunk/build/android/lighttpd_server.py
new file mode 100755
index 000000000..11ae794d4
--- /dev/null
+++ b/media/webrtc/trunk/build/android/lighttpd_server.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+ lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+ """Wraps lighttpd server, providing robust startup.
+
+ Args:
+ document_root: Path to root of this server's hosted files.
+ port: TCP port on the _host_ machine that the server will listen on. If
+      omitted it will attempt to use 9000, or if unavailable it will find
+ a free port from 8001 - 8999.
+ lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+ base_config_path: If supplied this file will replace the built-in default
+ lighttpd config file.
+ extra_config_contents: If specified, this string will be appended to the
+ base config (default built-in, or from base_config_path).
+ config_path, error_log, access_log: Optional paths where the class should
+      place temporary files for this session.
+ """
+
+ def __init__(self, document_root, port=None,
+ lighttpd_path=None, lighttpd_module_path=None,
+ base_config_path=None, extra_config_contents=None,
+ config_path=None, error_log=None, access_log=None):
+ self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+ self.document_root = os.path.abspath(document_root)
+ self.fixed_port = port
+ self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+ self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+ self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+ self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+ self.base_config_path = base_config_path
+ self.extra_config_contents = extra_config_contents
+ self.config_path = config_path or self._Mktmp('config')
+ self.error_log = error_log or self._Mktmp('error_log')
+ self.access_log = access_log or self._Mktmp('access_log')
+ self.pid_file = self._Mktmp('pid_file')
+ self.process = None
+
+ def _Mktmp(self, name):
+ return os.path.join(self.temp_dir, name)
+
+ def _GetRandomPort(self):
+    # The port range for the test server is defined in constants.py.
+ return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+ constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+ def StartupHttpServer(self):
+ """Starts up a http server with specified document root and port."""
+ # If we want a specific port, make sure no one else is listening on it.
+ if self.fixed_port:
+ self._KillProcessListeningOnPort(self.fixed_port)
+ while True:
+ if self.base_config_path:
+ # Read the config
+ with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+ config_contents = f.read()
+ else:
+ config_contents = self._GetDefaultBaseConfig()
+ if self.extra_config_contents:
+ config_contents += self.extra_config_contents
+ # Write out the config, filling in placeholders from the members of |self|
+ with codecs.open(self.config_path, 'w', 'utf-8') as f:
+ f.write(config_contents % self.__dict__)
+ if (not os.path.exists(self.lighttpd_path) or
+ not os.access(self.lighttpd_path, os.X_OK)):
+ raise EnvironmentError(
+ 'Could not find lighttpd at %s.\n'
+ 'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+ % self.lighttpd_path)
+ self.process = pexpect.spawn(self.lighttpd_path,
+ ['-D', '-f', self.config_path,
+ '-m', self.lighttpd_module_path],
+ cwd=self.temp_dir)
+ client_error, server_error = self._TestServerConnection()
+ if not client_error:
+ assert int(open(self.pid_file, 'r').read()) == self.process.pid
+ break
+ self.process.close()
+
+ if self.fixed_port or not 'in use' in server_error:
+ print 'Client error:', client_error
+ print 'Server error:', server_error
+ return False
+ self.port = self._GetRandomPort()
+ return True
+
+ def ShutdownHttpServer(self):
+ """Shuts down our lighttpd processes."""
+ if self.process:
+ self.process.terminate()
+ shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+ def _TestServerConnection(self):
+ # Wait for server to start
+ server_msg = ''
+ for timeout in xrange(1, 5):
+ client_error = None
+ try:
+ with contextlib.closing(httplib.HTTPConnection(
+ '127.0.0.1', self.port, timeout=timeout)) as http:
+ http.set_debuglevel(timeout > 3)
+ http.request('HEAD', '/')
+ r = http.getresponse()
+ r.read()
+ if (r.status == 200 and r.reason == 'OK' and
+ r.getheader('Server') == self.server_tag):
+ return (None, server_msg)
+ client_error = ('Bad response: %s %s version %s\n ' %
+ (r.status, r.reason, r.version) +
+ '\n '.join([': '.join(h) for h in r.getheaders()]))
+ except (httplib.HTTPException, socket.error) as client_error:
+ pass # Probably too quick connecting: try again
+ # Check for server startup error messages
+ ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+ timeout=timeout)
+ if ix == 2: # stdout spew from the server
+ server_msg += self.process.match.group(0)
+      elif ix == 1:  # EOF -- server has quit, so give up.
+ client_error = client_error or 'Server exited'
+ break
+ return (client_error or 'Timeout', server_msg)
+
+ def _KillProcessListeningOnPort(self, port):
+ """Checks if there is a process listening on port number |port| and
+ terminates it if found.
+
+ Args:
+ port: Port number to check.
+ """
+ if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+ # Give the process some time to terminate and check that it is gone.
+ time.sleep(2)
+ assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+ 'Unable to kill process listening on port %d.' % port
+
+ def _GetDefaultBaseConfig(self):
+ return """server.tag = "%(server_tag)s"
+server.modules = ( "mod_access",
+ "mod_accesslog",
+ "mod_alias",
+ "mod_cgi",
+ "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names = ( "index.php", "index.pl", "index.cgi",
+ "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign = (
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".svg" => "image/svg+xml",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".xhtml" => "application/xhtml+xml",
+ ".xhtmlmp" => "application/vnd.wap.xhtml+xml",
+ ".js" => "application/x-javascript",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".manifest" => "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr = "enable"
+
+##
+# which extensions should not be handle via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate = "enable"
+#dir-listing.encoding = "iso-8859-2"
+#dir-listing.external-css = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header = "enable"
+#debug.log-response-header = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found = "enable"
+
+#### SSL engine
+#ssl.engine = "enable"
+#ssl.pemfile = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi" => "/usr/bin/env",
+ ".pl" => "/usr/bin/env",
+ ".asis" => "/bin/cat",
+ ".php" => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+ server = LighttpdServer(*argv[1:])
+ try:
+ if server.StartupHttpServer():
+ raw_input('Server running at http://127.0.0.1:%s -'
+ ' press Enter to exit it.' % server.port)
+ else:
+ print 'Server exit code:', server.process.exitstatus
+ finally:
+ server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/android/pylib/__init__.py b/media/webrtc/trunk/build/android/pylib/__init__.py
new file mode 100644
index 000000000..727e987e6
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/media/webrtc/trunk/build/android/pylib/android_commands.py b/media/webrtc/trunk/build/android/pylib/android_commands.py
new file mode 100644
index 000000000..8a1562caa
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/android_commands.py
@@ -0,0 +1,1071 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes adb binary is currently on system path.
+"""
+
+import collections
+import datetime
+import logging
+import os
+import re
+import shlex
+import subprocess
+import sys
+import tempfile
+import time
+
+import io_stats_parser
+from pylib import pexpect
+
+CHROME_SRC = os.path.join(
+ os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+sys.path.append(os.path.join(CHROME_SRC, 'third_party', 'android_testrunner'))
+import adb_interface
+
+import cmd_helper
+import errors # is under ../../../third_party/android_testrunner/errors.py
+
+
+# Pattern to search for the next whole line of pexpect output and capture it
+# into a match group. We can't use ^ and $ for line start end with pexpect,
+# see http://www.noah.org/python/pexpect/#doc for explanation why.
+PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r')
+
+# Set the adb shell prompt to be a unique marker that will [hopefully] not
+# appear at the start of any line of a command's output.
+SHELL_PROMPT = '~+~PQ\x17RS~+~'
+
+# Java properties file
+LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Property in /data/local.prop that controls Java assertions.
+JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+MEMORY_INFO_RE = re.compile('^(?P<key>\w+):\s+(?P<usage_kb>\d+) kB$')
+NVIDIA_MEMORY_INFO_RE = re.compile('^\s*(?P<user>\S+)\s*(?P<name>\S+)\s*'
+ '(?P<pid>\d+)\s*(?P<usage_bytes>\d+)$')
+
+# Keycode "enum" suitable for passing to AndroidCommands.SendKey().
+KEYCODE_HOME = 3
+KEYCODE_BACK = 4
+KEYCODE_DPAD_UP = 19
+KEYCODE_DPAD_DOWN = 20
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
+MD5SUM_DEVICE_PATH = '/data/local/tmp/md5sum_bin'
+
+def GetEmulators():
+ """Returns a list of emulators. Does not filter by status (e.g. offline).
+
+ Both devices starting with 'emulator' will be returned in below output:
+
+ * daemon not running. starting it now on port 5037 *
+ * daemon started successfully *
+ List of devices attached
+ 027c10494100b4d7 device
+ emulator-5554 offline
+ emulator-5558 device
+ """
+ re_device = re.compile('^emulator-[0-9]+', re.MULTILINE)
+ devices = re_device.findall(cmd_helper.GetCmdOutput(['adb', 'devices']))
+ return devices
+
+
+def GetAVDs():
+ """Returns a list of AVDs."""
+ re_avd = re.compile('^[ ]+Name: ([a-zA-Z0-9_:.-]+)', re.MULTILINE)
+ avds = re_avd.findall(cmd_helper.GetCmdOutput(['android', 'list', 'avd']))
+ return avds
+
+
+def GetAttachedDevices():
+ """Returns a list of attached, online android devices.
+
+ If a preferred device has been set with ANDROID_SERIAL, it will be first in
+ the returned list.
+
+ Example output:
+
+ * daemon not running. starting it now on port 5037 *
+ * daemon started successfully *
+ List of devices attached
+ 027c10494100b4d7 device
+ emulator-5554 offline
+ """
+ re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
+ devices = re_device.findall(cmd_helper.GetCmdOutput(['adb', 'devices']))
+ preferred_device = os.environ.get('ANDROID_SERIAL')
+ if preferred_device in devices:
+ devices.remove(preferred_device)
+ devices.insert(0, preferred_device)
+ return devices
+
+def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None):
+ """Gets a list of files from `ls` command output.
+
+ Python's os.walk isn't used because it doesn't work over adb shell.
+
+ Args:
+ path: The path to list.
+ ls_output: A list of lines returned by an `ls -lR` command.
+ re_file: A compiled regular expression which parses a line into named groups
+ consisting of at minimum "filename", "date", "time", "size" and
+ optionally "timezone".
+ utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a
+ 2-digit string giving the number of UTC offset hours, and MM is a
+ 2-digit string giving the number of UTC offset minutes. If the input
+ utc_offset is None, will try to look for the value of "timezone" if it
+ is specified in re_file.
+
+ Returns:
+ A dict of {"name": (size, lastmod), ...} where:
+ name: The file name relative to |path|'s directory.
+ size: The file size in bytes (0 for directories).
+ lastmod: The file last modification date in UTC.
+ """
+ re_directory = re.compile('^%s/(?P<dir>[^:]+):$' % re.escape(path))
+ path_dir = os.path.dirname(path)
+
+ current_dir = ''
+ files = {}
+ for line in ls_output:
+ directory_match = re_directory.match(line)
+ if directory_match:
+ current_dir = directory_match.group('dir')
+ continue
+ file_match = re_file.match(line)
+ if file_match:
+ filename = os.path.join(current_dir, file_match.group('filename'))
+ if filename.startswith(path_dir):
+ filename = filename[len(path_dir)+1:]
+ lastmod = datetime.datetime.strptime(
+ file_match.group('date') + ' ' + file_match.group('time')[:5],
+ '%Y-%m-%d %H:%M')
+ if not utc_offset and 'timezone' in re_file.groupindex:
+ utc_offset = file_match.group('timezone')
+ if isinstance(utc_offset, str) and len(utc_offset) == 5:
+ utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]),
+ minutes=int(utc_offset[3:5]))
+ if utc_offset[0:1] == '-':
+ utc_delta = -utc_delta
+ lastmod -= utc_delta
+ files[filename] = (int(file_match.group('size')), lastmod)
+ return files
+
+def _ComputeFileListHash(md5sum_output):
+ """Returns a list of MD5 strings from the provided md5sum output."""
+ return [line.split(' ')[0] for line in md5sum_output]
+
+def _HasAdbPushSucceeded(command_output):
+ """Returns whether adb push has succeeded from the provided output."""
+ if not command_output:
+ return False
+ # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)"
+ # Errors look like this: "failed to copy ... "
+ if not re.search('^[0-9]', command_output.splitlines()[-1]):
+ logging.critical('PUSH FAILED: ' + command_output)
+ return False
+ return True
+
+def GetLogTimestamp(log_line, year):
+ """Returns the timestamp of the given |log_line| in the given year."""
+ try:
+ return datetime.datetime.strptime('%s-%s' % (year, log_line[:18]),
+ '%Y-%m-%d %H:%M:%S.%f')
+ except (ValueError, IndexError):
+ logging.critical('Error reading timestamp from ' + log_line)
+ return None
+
+
+class AndroidCommands(object):
+ """Helper class for communicating with Android device via adb.
+
+ Args:
+    device: If given, adb commands are only sent to the device of this ID.
+ Otherwise commands are sent to all attached devices.
+ """
+
+ def __init__(self, device=None):
+ self._adb = adb_interface.AdbInterface()
+ if device:
+ self._adb.SetTargetSerial(device)
+ self._logcat = None
+ self.logcat_process = None
+ self._pushed_files = []
+ self._device_utc_offset = self.RunShellCommand('date +%z')[0]
+ self._md5sum_path = ''
+ self._external_storage = ''
+
+ def Adb(self):
+ """Returns our AdbInterface to avoid us wrapping all its methods."""
+ return self._adb
+
+ def IsRootEnabled(self):
+ """Checks if root is enabled on the device."""
+ root_test_output = self.RunShellCommand('ls /root') or ['']
+ return not 'Permission denied' in root_test_output[0]
+
+ def EnableAdbRoot(self):
+ """Enables adb root on the device.
+
+ Returns:
+ True: if output from executing adb root was as expected.
+ False: otherwise.
+ """
+ return_value = self._adb.EnableAdbRoot()
+ # EnableAdbRoot inserts a call for wait-for-device only when adb logcat
+ # output matches what is expected. Just to be safe add a call to
+ # wait-for-device.
+ self._adb.SendCommand('wait-for-device')
+ return return_value
+
+ def GetDeviceYear(self):
+ """Returns the year information of the date on device."""
+ return self.RunShellCommand('date +%Y')[0]
+
+ def GetExternalStorage(self):
+ if not self._external_storage:
+ self._external_storage = self.RunShellCommand('echo $EXTERNAL_STORAGE')[0]
+ assert self._external_storage, 'Unable to find $EXTERNAL_STORAGE'
+ return self._external_storage
+
+ def WaitForDevicePm(self):
+ """Blocks until the device's package manager is available.
+
+ To workaround http://b/5201039, we restart the shell and retry if the
+ package manager isn't back after 120 seconds.
+
+ Raises:
+ errors.WaitForResponseTimedOutError after max retries reached.
+ """
+ last_err = None
+ retries = 3
+ while retries:
+ try:
+ self._adb.WaitForDevicePm()
+ return # Success
+ except errors.WaitForResponseTimedOutError as e:
+ last_err = e
+ logging.warning('Restarting and retrying after timeout: %s', e)
+ retries -= 1
+ self.RestartShell()
+ raise last_err # Only reached after max retries, re-raise the last error.
+
+ def RestartShell(self):
+ """Restarts the shell on the device. Does not block for it to return."""
+ self.RunShellCommand('stop')
+ self.RunShellCommand('start')
+
+ def Reboot(self, full_reboot=True):
+ """Reboots the device and waits for the package manager to return.
+
+ Args:
+ full_reboot: Whether to fully reboot the device or just restart the shell.
+ """
+ # TODO(torne): hive can't reboot the device either way without breaking the
+ # connection; work out if we can handle this better
+ if os.environ.get('USING_HIVE'):
+ logging.warning('Ignoring reboot request as we are on hive')
+ return
+ if full_reboot or not self.IsRootEnabled():
+ self._adb.SendCommand('reboot')
+ timeout = 300
+ else:
+ self.RestartShell()
+ timeout = 120
+ # To run tests we need at least the package manager and the sd card (or
+ # other external storage) to be ready.
+ self.WaitForDevicePm()
+ self.WaitForSdCardReady(timeout)
+
+ def Uninstall(self, package):
+ """Uninstalls the specified package from the device.
+
+ Args:
+ package: Name of the package to remove.
+
+ Returns:
+ A status string returned by adb uninstall
+ """
+ uninstall_command = 'uninstall %s' % package
+
+ logging.info('>>> $' + uninstall_command)
+ return self._adb.SendCommand(uninstall_command, timeout_time=60)
+
+ def Install(self, package_file_path, reinstall=False):
+ """Installs the specified package to the device.
+
+ Args:
+ package_file_path: Path to .apk file to install.
+ reinstall: Reinstall an existing apk, keeping the data.
+
+ Returns:
+ A status string returned by adb install
+ """
+ assert os.path.isfile(package_file_path), ('<%s> is not file' %
+ package_file_path)
+
+ install_cmd = ['install']
+
+ if reinstall:
+ install_cmd.append('-r')
+
+ install_cmd.append(package_file_path)
+ install_cmd = ' '.join(install_cmd)
+
+ logging.info('>>> $' + install_cmd)
+ return self._adb.SendCommand(install_cmd, timeout_time=2*60, retry_count=0)
+
+ def ManagedInstall(self, apk_path, keep_data=False, package_name=None,
+ reboots_on_failure=2):
+ """Installs specified package and reboots device on timeouts.
+
+ Args:
+ apk_path: Path to .apk file to install.
+ keep_data: Reinstalls instead of uninstalling first, preserving the
+ application data.
+ package_name: Package name (only needed if keep_data=False).
+      reboots_on_failure: number of times to reboot if package manager is frozen.
+
+ Returns:
+ A status string returned by adb install
+ """
+ reboots_left = reboots_on_failure
+ while True:
+ try:
+ if not keep_data:
+ assert package_name
+ self.Uninstall(package_name)
+ install_status = self.Install(apk_path, reinstall=keep_data)
+ if 'Success' in install_status:
+ return install_status
+ except errors.WaitForResponseTimedOutError:
+ print '@@@STEP_WARNINGS@@@'
+ logging.info('Timeout on installing %s' % apk_path)
+
+ if reboots_left <= 0:
+ raise Exception('Install failure')
+
+ # Force a hard reboot on last attempt
+ self.Reboot(full_reboot=(reboots_left == 1))
+ reboots_left -= 1
+
+ def MakeSystemFolderWritable(self):
+ """Remounts the /system folder rw."""
+ out = self._adb.SendCommand('remount')
+ if out.strip() != 'remount succeeded':
+ raise errors.MsgException('Remount failed: %s' % out)
+
+ def RestartAdbServer(self):
+ """Restart the adb server."""
+ self.KillAdbServer()
+ self.StartAdbServer()
+
+ def KillAdbServer(self):
+ """Kill adb server."""
+ adb_cmd = ['adb', 'kill-server']
+ return cmd_helper.RunCmd(adb_cmd)
+
+ def StartAdbServer(self):
+ """Start adb server."""
+ adb_cmd = ['adb', 'start-server']
+ return cmd_helper.RunCmd(adb_cmd)
+
+ def WaitForSystemBootCompleted(self, wait_time):
+ """Waits for targeted system's boot_completed flag to be set.
+
+ Args:
+ wait_time: time in seconds to wait
+
+ Raises:
+ WaitForResponseTimedOutError if wait_time elapses and flag still not
+ set.
+ """
+ logging.info('Waiting for system boot completed...')
+ self._adb.SendCommand('wait-for-device')
+ # Now the device is there, but system not boot completed.
+ # Query the sys.boot_completed flag with a basic command
+ boot_completed = False
+ attempts = 0
+ wait_period = 5
+ while not boot_completed and (attempts * wait_period) < wait_time:
+ output = self._adb.SendShellCommand('getprop sys.boot_completed',
+ retry_count=1)
+ output = output.strip()
+ if output == '1':
+ boot_completed = True
+ else:
+ # If 'error: xxx' returned when querying the flag, it means
+ # adb server lost the connection to the emulator, so restart the adb
+ # server.
+ if 'error:' in output:
+ self.RestartAdbServer()
+ time.sleep(wait_period)
+ attempts += 1
+ if not boot_completed:
+ raise errors.WaitForResponseTimedOutError(
+ 'sys.boot_completed flag was not set after %s seconds' % wait_time)
+
+ def WaitForSdCardReady(self, timeout_time):
+ """Wait for the SD card ready before pushing data into it."""
+ logging.info('Waiting for SD card ready...')
+ sdcard_ready = False
+ attempts = 0
+ wait_period = 5
+ external_storage = self.GetExternalStorage()
+ while not sdcard_ready and attempts * wait_period < timeout_time:
+ output = self.RunShellCommand('ls ' + external_storage)
+ if output:
+ sdcard_ready = True
+ else:
+ time.sleep(wait_period)
+ attempts += 1
+ if not sdcard_ready:
+ raise errors.WaitForResponseTimedOutError(
+ 'SD card not ready after %s seconds' % timeout_time)
+
+ # It is tempting to turn this function into a generator, however this is not
+ # possible without using a private (local) adb_shell instance (to ensure no
+ # other command interleaves usage of it), which would defeat the main aim of
+ # being able to reuse the adb shell instance across commands.
+ def RunShellCommand(self, command, timeout_time=20, log_result=False):
+ """Send a command to the adb shell and return the result.
+
+ Args:
+ command: String containing the shell command to send. Must not include
+ the single quotes as we use them to escape the whole command.
+ timeout_time: Number of seconds to wait for command to respond before
+ retrying, used by AdbInterface.SendShellCommand.
+ log_result: Boolean to indicate whether we should log the result of the
+ shell command.
+
+ Returns:
+ list containing the lines of output received from running the command
+ """
+ logging.info('>>> $' + command)
+ if "'" in command: logging.warning(command + " contains ' quotes")
+ result = self._adb.SendShellCommand(
+ "'%s'" % command, timeout_time).splitlines()
+ if ['error: device not found'] == result:
+ raise errors.DeviceUnresponsiveError('device not found')
+ if log_result:
+ logging.info('\n>>> '.join(result))
+ return result
+
+ def KillAll(self, process):
+ """Android version of killall, connected via adb.
+
+ Args:
+ process: name of the process to kill off
+
+ Returns:
+ the number of processes killed
+ """
+ pids = self.ExtractPid(process)
+ if pids:
+ self.RunShellCommand('kill ' + ' '.join(pids))
+ return len(pids)
+
+ def KillAllBlocking(self, process, timeout_sec):
+ """Blocking version of killall, connected via adb.
+
+ This waits until no process matching the corresponding name appears in ps'
+ output anymore.
+
+ Args:
+ process: name of the process to kill off
+ timeout_sec: the timeout in seconds
+
+ Returns:
+ the number of processes killed
+ """
+ processes_killed = self.KillAll(process)
+ if processes_killed:
+ elapsed = 0
+ wait_period = 0.1
+ # Note that this doesn't take into account the time spent in ExtractPid().
+ while self.ExtractPid(process) and elapsed < timeout_sec:
+ time.sleep(wait_period)
+ elapsed += wait_period
+ if elapsed >= timeout_sec:
+ return 0
+ return processes_killed
+
+ def StartActivity(self, package, activity, wait_for_completion=False,
+ action='android.intent.action.VIEW',
+ category=None, data=None,
+ extras=None, trace_file_name=None):
+ """Starts |package|'s activity on the device.
+
+ Args:
+ package: Name of package to start (e.g. 'com.google.android.apps.chrome').
+ activity: Name of activity (e.g. '.Main' or
+ 'com.google.android.apps.chrome.Main').
+ wait_for_completion: wait for the activity to finish launching (-W flag).
+ action: string (e.g. "android.intent.action.MAIN"). Default is VIEW.
+ category: string (e.g. "android.intent.category.HOME")
+ data: Data string to pass to activity (e.g. 'http://www.example.com/').
+ extras: Dict of extras to pass to activity. Values are significant.
+ trace_file_name: If used, turns on and saves the trace to this file name.
+ """
+ cmd = 'am start -a %s' % action
+ if wait_for_completion:
+ cmd += ' -W'
+ if category:
+ cmd += ' -c %s' % category
+ if package and activity:
+ cmd += ' -n %s/%s' % (package, activity)
+ if data:
+ cmd += ' -d "%s"' % data
+ if extras:
+ for key in extras:
+ value = extras[key]
+ if isinstance(value, str):
+ cmd += ' --es'
+ elif isinstance(value, bool):
+ cmd += ' --ez'
+ elif isinstance(value, int):
+ cmd += ' --ei'
+ else:
+ raise NotImplementedError(
+ 'Need to teach StartActivity how to pass %s extras' % type(value))
+ cmd += ' %s %s' % (key, value)
+ if trace_file_name:
+ cmd += ' --start-profiler ' + trace_file_name
+ self.RunShellCommand(cmd)
+
+ def GoHome(self):
+ """Tell the device to return to the home screen. Blocks until completion."""
+ self.RunShellCommand('am start -W '
+ '-a android.intent.action.MAIN -c android.intent.category.HOME')
+
+ def CloseApplication(self, package):
+ """Attempt to close down the application, using increasing violence.
+
+ Args:
+ package: Name of the process to kill off, e.g.
+ com.google.android.apps.chrome
+ """
+ self.RunShellCommand('am force-stop ' + package)
+
+ def ClearApplicationState(self, package):
+ """Closes and clears all state for the given |package|."""
+ self.CloseApplication(package)
+ self.RunShellCommand('rm -r /data/data/%s/app_*' % package)
+ self.RunShellCommand('rm -r /data/data/%s/cache/*' % package)
+ self.RunShellCommand('rm -r /data/data/%s/files/*' % package)
+ self.RunShellCommand('rm -r /data/data/%s/shared_prefs/*' % package)
+
+ def SendKeyEvent(self, keycode):
+ """Sends keycode to the device.
+
+ Args:
+ keycode: Numeric keycode to send (see "enum" at top of file).
+ """
+ self.RunShellCommand('input keyevent %d' % keycode)
+
+ def PushIfNeeded(self, local_path, device_path):
+ """Pushes |local_path| to |device_path|.
+
+ Works for files and directories. This method skips copying any paths in
+ |test_data_paths| that already exist on the device with the same hash.
+
+ All pushed files can be removed by calling RemovePushedFiles().
+ """
+ assert os.path.exists(local_path), 'Local path not found %s' % local_path
+
+ if not self._md5sum_path:
+ default_build_type = os.environ.get('BUILD_TYPE', 'Debug')
+ md5sum_path = '%s/out/%s/md5sum_bin' % (CHROME_SRC, default_build_type)
+ if not os.path.exists(md5sum_path):
+ md5sum_path = '%s/out/Release/md5sum_bin' % (CHROME_SRC)
+ if not os.path.exists(md5sum_path):
+ print >> sys.stderr, 'Please build md5sum.'
+ sys.exit(1)
+ command = 'push %s %s' % (md5sum_path, MD5SUM_DEVICE_PATH)
+ assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+ self._md5sum_path = md5sum_path
+
+ self._pushed_files.append(device_path)
+ hashes_on_device = _ComputeFileListHash(
+ self.RunShellCommand(MD5SUM_DEVICE_PATH + ' ' + device_path))
+ assert os.path.exists(local_path), 'Local path not found %s' % local_path
+ hashes_on_host = _ComputeFileListHash(
+ subprocess.Popen(
+ '%s_host %s' % (self._md5sum_path, local_path),
+ stdout=subprocess.PIPE, shell=True).stdout)
+ if hashes_on_device == hashes_on_host:
+ return
+
+ # They don't match, so remove everything first and then create it.
+ if os.path.isdir(local_path):
+ self.RunShellCommand('rm -r %s' % device_path, timeout_time=2*60)
+ self.RunShellCommand('mkdir -p %s' % device_path)
+
+ # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout of
+ # 60 seconds which isn't sufficient for a lot of users of this method.
+ push_command = 'push %s %s' % (local_path, device_path)
+ logging.info('>>> $' + push_command)
+ output = self._adb.SendCommand(push_command, timeout_time=30*60)
+ assert _HasAdbPushSucceeded(output)
+
+
+ def GetFileContents(self, filename, log_result=False):
+ """Gets contents from the file specified by |filename|."""
+ return self.RunShellCommand('if [ -f "' + filename + '" ]; then cat "' +
+ filename + '"; fi', log_result=log_result)
+
+ def SetFileContents(self, filename, contents):
+ """Writes |contents| to the file specified by |filename|."""
+ with tempfile.NamedTemporaryFile() as f:
+ f.write(contents)
+ f.flush()
+ self._adb.Push(f.name, filename)
+
+ def RemovePushedFiles(self):
+ """Removes all files pushed with PushIfNeeded() from the device."""
+ for p in self._pushed_files:
+ self.RunShellCommand('rm -r %s' % p, timeout_time=2*60)
+
+ def ListPathContents(self, path):
+ """Lists files in all subdirectories of |path|.
+
+ Args:
+ path: The path to list.
+
+ Returns:
+ A dict of {"name": (size, lastmod), ...}.
+ """
+ # Example output:
+ # /foo/bar:
+ # -rw-r----- 1 user group 102 2011-05-12 12:29:54.131623387 +0100 baz.txt
+ re_file = re.compile('^-(?P<perms>[^\s]+)\s+'
+ '(?P<user>[^\s]+)\s+'
+ '(?P<group>[^\s]+)\s+'
+ '(?P<size>[^\s]+)\s+'
+ '(?P<date>[^\s]+)\s+'
+ '(?P<time>[^\s]+)\s+'
+ '(?P<filename>[^\s]+)$')
+ return _GetFilesFromRecursiveLsOutput(
+ path, self.RunShellCommand('ls -lR %s' % path), re_file,
+ self._device_utc_offset)
+
+
+ def SetJavaAssertsEnabled(self, enable):
+ """Sets or removes the device java assertions property.
+
+ Args:
+ enable: If True the property will be set.
+
+ Returns:
+ True if the file was modified (reboot is required for it to take effect).
+ """
+ # First ensure the desired property is persisted.
+ temp_props_file = tempfile.NamedTemporaryFile()
+ properties = ''
+ if self._adb.Pull(LOCAL_PROPERTIES_PATH, temp_props_file.name):
+ properties = file(temp_props_file.name).read()
+ re_search = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+ r'\s*=\s*all\s*$', re.MULTILINE)
+ if enable != bool(re.search(re_search, properties)):
+ re_replace = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+ r'\s*=\s*\w+\s*$', re.MULTILINE)
+ properties = re.sub(re_replace, '', properties)
+ if enable:
+ properties += '\n%s=all\n' % JAVA_ASSERT_PROPERTY
+
+ file(temp_props_file.name, 'w').write(properties)
+ self._adb.Push(temp_props_file.name, LOCAL_PROPERTIES_PATH)
+
+ # Next, check the current runtime value is what we need, and
+ # if not, set it and report that a reboot is required.
+ was_set = 'all' in self.RunShellCommand('getprop ' + JAVA_ASSERT_PROPERTY)
+ if was_set == enable:
+ return False
+
+ self.RunShellCommand('setprop %s "%s"' % (JAVA_ASSERT_PROPERTY,
+ enable and 'all' or ''))
+ return True
+
+ def GetBuildId(self):
+ """Returns the build ID of the system (e.g. JRM79C)."""
+ build_id = self.RunShellCommand('getprop ro.build.id')[0]
+ assert build_id
+ return build_id
+
+ def GetBuildType(self):
+ """Returns the build type of the system (e.g. eng)."""
+ build_type = self.RunShellCommand('getprop ro.build.type')[0]
+ assert build_type
+ return build_type
+
  def StartMonitoringLogcat(self, clear=True, timeout=10, logfile=None,
                            filters=None):
    """Starts monitoring the output of logcat, for use with WaitForLogMatch.

    Args:
      clear: If True the existing logcat output will be cleared, to avoiding
             matching historical output lurking in the log.
      timeout: How long WaitForLogMatch will wait for the given match
      filters: A list of logcat filters to be used.
    """
    if clear:
      self.RunShellCommand('logcat -c')
    args = []
    # NOTE(review): reaches into AdbInterface's private _target_arg (e.g.
    # '-s <serial>') to target the same device -- confirm against
    # adb_interface before changing.
    if self._adb._target_arg:
      args += shlex.split(self._adb._target_arg)
    args += ['logcat', '-v', 'threadtime']
    if filters:
      args.extend(filters)
    else:
      args.append('*:v')

    if logfile:
      # Strip pseudo-tty '\r\r\n' artifacts before they reach the logfile.
      logfile = NewLineNormalizer(logfile)

    # Spawn logcat and synchronize with it: emit a marker line into the log
    # and wait to see it come back, proving the spawned logcat is live.
    # Up to four attempts before giving up.
    for _ in range(4):
      self._logcat = pexpect.spawn('adb', args, timeout=timeout,
                                   logfile=logfile)
      self.RunShellCommand('log startup_sync')
      if self._logcat.expect(['startup_sync', pexpect.EOF,
                              pexpect.TIMEOUT]) == 0:
        break
      self._logcat.close(force=True)
    else:
      # All attempts saw EOF or timed out instead of the marker.
      logging.critical('Error reading from logcat: ' + str(self._logcat.match))
      sys.exit(1)
+
+ def GetMonitoredLogCat(self):
+ """Returns an "adb logcat" command as created by pexpected.spawn."""
+ if not self._logcat:
+ self.StartMonitoringLogcat(clear=False)
+ return self._logcat
+
  def WaitForLogMatch(self, success_re, error_re, clear=False):
    """Blocks until a matching line is logged or a timeout occurs.

    Args:
      success_re: A compiled re to search each line for.
      error_re: A compiled re which, if found, terminates the search for
          |success_re|. If None is given, no error condition will be detected.
      clear: If True the existing logcat output will be cleared, defaults to
          false.

    Raises:
      pexpect.TIMEOUT upon the timeout specified by StartMonitoringLogcat().

    Returns:
      The re match object if |success_re| is matched first or None if |error_re|
      is matched first.
    """
    logging.info('<<< Waiting for logcat:' + str(success_re.pattern))
    t0 = time.time()
    # Outer loop exists so we can respawn logcat and resume after EOF.
    while True:
      if not self._logcat:
        self.StartMonitoringLogcat(clear)
      try:
        while True:
          # Note this will block for upto the timeout _per log line_, so we need
          # to calculate the overall timeout remaining since t0.
          time_remaining = t0 + self._logcat.timeout - time.time()
          if time_remaining < 0: raise pexpect.TIMEOUT(self._logcat)
          # NOTE(review): PEXPECT_LINE_RE is defined at module level (not
          # visible here); group(1) below implies it captures one whole line.
          self._logcat.expect(PEXPECT_LINE_RE, timeout=time_remaining)
          line = self._logcat.match.group(1)
          if error_re:
            error_match = error_re.search(line)
            if error_match:
              return None
          success_match = success_re.search(line)
          if success_match:
            return success_match
          logging.info('<<< Skipped Logcat Line:' + str(line))
      except pexpect.TIMEOUT:
        raise pexpect.TIMEOUT(
            'Timeout (%ds) exceeded waiting for pattern "%s" (tip: use -vv '
            'to debug)' %
            (self._logcat.timeout, success_re.pattern))
      except pexpect.EOF:
        # It seems that sometimes logcat can end unexpectedly. This seems
        # to happen during Chrome startup after a reboot followed by a cache
        # clean. I don't understand why this happens, but this code deals with
        # getting EOF in logcat.
        logging.critical('Found EOF in adb logcat. Restarting...')
        # Rerun spawn with original arguments. Note that self._logcat.args[0] is
        # the path of adb, so we don't want it in the arguments.
        self._logcat = pexpect.spawn('adb',
                                     self._logcat.args[1:],
                                     timeout=self._logcat.timeout,
                                     logfile=self._logcat.logfile)
+
+ def StartRecordingLogcat(self, clear=True, filters=['*:v']):
+ """Starts recording logcat output to eventually be saved as a string.
+
+ This call should come before some series of tests are run, with either
+ StopRecordingLogcat or SearchLogcatRecord following the tests.
+
+ Args:
+ clear: True if existing log output should be cleared.
+ filters: A list of logcat filters to be used.
+ """
+ if clear:
+ self._adb.SendCommand('logcat -c')
+ logcat_command = 'adb %s logcat -v threadtime %s' % (self._adb._target_arg,
+ ' '.join(filters))
+ self.logcat_process = subprocess.Popen(logcat_command, shell=True,
+ stdout=subprocess.PIPE)
+
+ def StopRecordingLogcat(self):
+ """Stops an existing logcat recording subprocess and returns output.
+
+ Returns:
+ The logcat output as a string or an empty string if logcat was not
+ being recorded at the time.
+ """
+ if not self.logcat_process:
+ return ''
+ # Cannot evaluate directly as 0 is a possible value.
+ # Better to read the self.logcat_process.stdout before killing it,
+ # Otherwise the communicate may return incomplete output due to pipe break.
+ if self.logcat_process.poll() is None:
+ self.logcat_process.kill()
+ (output, _) = self.logcat_process.communicate()
+ self.logcat_process = None
+ return output
+
+ def SearchLogcatRecord(self, record, message, thread_id=None, proc_id=None,
+ log_level=None, component=None):
+ """Searches the specified logcat output and returns results.
+
+ This method searches through the logcat output specified by record for a
+ certain message, narrowing results by matching them against any other
+ specified criteria. It returns all matching lines as described below.
+
+ Args:
+ record: A string generated by Start/StopRecordingLogcat to search.
+ message: An output string to search for.
+ thread_id: The thread id that is the origin of the message.
+ proc_id: The process that is the origin of the message.
+ log_level: The log level of the message.
+ component: The name of the component that would create the message.
+
+ Returns:
+ A list of dictionaries represeting matching entries, each containing keys
+ thread_id, proc_id, log_level, component, and message.
+ """
+ if thread_id:
+ thread_id = str(thread_id)
+ if proc_id:
+ proc_id = str(proc_id)
+ results = []
+ reg = re.compile('(\d+)\s+(\d+)\s+([A-Z])\s+([A-Za-z]+)\s*:(.*)$',
+ re.MULTILINE)
+ log_list = reg.findall(record)
+ for (tid, pid, log_lev, comp, msg) in log_list:
+ if ((not thread_id or thread_id == tid) and
+ (not proc_id or proc_id == pid) and
+ (not log_level or log_level == log_lev) and
+ (not component or component == comp) and msg.find(message) > -1):
+ match = dict({'thread_id': tid, 'proc_id': pid,
+ 'log_level': log_lev, 'component': comp,
+ 'message': msg})
+ results.append(match)
+ return results
+
+ def ExtractPid(self, process_name):
+ """Extracts Process Ids for a given process name from Android Shell.
+
+ Args:
+ process_name: name of the process on the device.
+
+ Returns:
+ List of all the process ids (as strings) that match the given name.
+ If the name of a process exactly matches the given name, the pid of
+ that process will be inserted to the front of the pid list.
+ """
+ pids = []
+ for line in self.RunShellCommand('ps', log_result=False):
+ data = line.split()
+ try:
+ if process_name in data[-1]: # name is in the last column
+ if process_name == data[-1]:
+ pids.insert(0, data[1]) # PID is in the second column
+ else:
+ pids.append(data[1])
+ except IndexError:
+ pass
+ return pids
+
+ def GetIoStats(self):
+ """Gets cumulative disk IO stats since boot (for all processes).
+
+ Returns:
+ Dict of {num_reads, num_writes, read_ms, write_ms} or None if there
+ was an error.
+ """
+ for line in self.GetFileContents('/proc/diskstats', log_result=False):
+ stats = io_stats_parser.ParseIoStatsLine(line)
+ if stats.device == 'mmcblk0':
+ return {
+ 'num_reads': stats.num_reads_issued,
+ 'num_writes': stats.num_writes_completed,
+ 'read_ms': stats.ms_spent_reading,
+ 'write_ms': stats.ms_spent_writing,
+ }
+ logging.warning('Could not find disk IO stats.')
+ return None
+
  def GetMemoryUsageForPid(self, pid):
    """Returns the memory usage for given pid.

    Args:
      pid: The pid number of the specific process running on device.

    Returns:
      A tuple containing:
      [0]: Dict of {metric:usage_kb}, for the process which has specified pid.
      The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
      Shared_Dirty, Private_Clean, Private_Dirty, Referenced, Swap,
      KernelPageSize, MMUPageSize, Nvidia (tablet only).
      [1]: Detailed /proc/[PID]/smaps information.
    """
    usage_dict = collections.defaultdict(int)
    smaps = collections.defaultdict(dict)
    current_smap = ''
    for line in self.GetFileContents('/proc/%s/smaps' % pid, log_result=False):
      items = line.split()
      # See man 5 proc for more details. The format is:
      # address perms offset dev inode pathname
      # A mapping-header line names the region; subsequent metric lines are
      # attributed to the most recently seen region name.
      if len(items) > 5:
        current_smap = ' '.join(items[5:])
      elif len(items) > 3:
        current_smap = ' '.join(items[3:])
      # NOTE(review): MEMORY_INFO_RE is defined at module level (not visible
      # here); the group() calls below imply it captures named groups 'key'
      # and 'usage_kb' -- confirm at its definition.
      match = re.match(MEMORY_INFO_RE, line)
      if match:
        key = match.group('key')
        usage_kb = int(match.group('usage_kb'))
        usage_dict[key] += usage_kb
        if key not in smaps[current_smap]:
          smaps[current_smap][key] = 0
        smaps[current_smap][key] += usage_kb
    if not usage_dict or not any(usage_dict.values()):
      # Presumably the process died between ps and calling this method.
      logging.warning('Could not find memory usage for pid ' + str(pid))

    # Extra graphics-memory accounting; on devices without this file
    # GetFileContents returns no lines and the loop is a no-op.
    # NOTE(review): the comparison is against |pid| as passed in; the regex
    # group is a string, so an int pid would never match -- callers appear to
    # pass strings (see GetMemoryUsageForPackage).
    for line in self.GetFileContents('/d/nvmap/generic-0/clients',
                                     log_result=False):
      match = re.match(NVIDIA_MEMORY_INFO_RE, line)
      if match and match.group('pid') == pid:
        usage_bytes = int(match.group('usage_bytes'))
        usage_dict['Nvidia'] = int(round(usage_bytes / 1000.0))  # kB
        break

    return (usage_dict, smaps)
+
+ def GetMemoryUsageForPackage(self, package):
+ """Returns the memory usage for all processes whose name contains |pacakge|.
+
+ Args:
+ package: A string holding process name to lookup pid list for.
+
+ Returns:
+ A tuple containg:
+ [0]: Dict of {metric:usage_kb}, summed over all pids associated with
+ |name|.
+ The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
+ Shared_Dirty, Private_Clean, Private_Dirty, Referenced, Swap,
+ KernelPageSize, MMUPageSize, Nvidia (tablet only).
+ [1]: a list with detailed /proc/[PID]/smaps information.
+ """
+ usage_dict = collections.defaultdict(int)
+ pid_list = self.ExtractPid(package)
+ smaps = collections.defaultdict(dict)
+
+ for pid in pid_list:
+ usage_dict_per_pid, smaps_per_pid = self.GetMemoryUsageForPid(pid)
+ smaps[pid] = smaps_per_pid
+ for (key, value) in usage_dict_per_pid.items():
+ usage_dict[key] += value
+
+ return usage_dict, smaps
+
+ def ProcessesUsingDevicePort(self, device_port):
+ """Lists processes using the specified device port on loopback interface.
+
+ Args:
+ device_port: Port on device we want to check.
+
+ Returns:
+ A list of (pid, process_name) tuples using the specified port.
+ """
+ tcp_results = self.RunShellCommand('cat /proc/net/tcp', log_result=False)
+ tcp_address = '0100007F:%04X' % device_port
+ pids = []
+ for single_connect in tcp_results:
+ connect_results = single_connect.split()
+ # Column 1 is the TCP port, and Column 9 is the inode of the socket
+ if connect_results[1] == tcp_address:
+ socket_inode = connect_results[9]
+ socket_name = 'socket:[%s]' % socket_inode
+ lsof_results = self.RunShellCommand('lsof', log_result=False)
+ for single_process in lsof_results:
+ process_results = single_process.split()
+ # Ignore the line if it has less than nine columns in it, which may
+ # be the case when a process stops while lsof is executing.
+ if len(process_results) <= 8:
+ continue
+ # Column 0 is the executable name
+ # Column 1 is the pid
+ # Column 8 is the Inode in use
+ if process_results[8] == socket_name:
+ pids.append((int(process_results[1]), process_results[0]))
+ break
+ logging.info('PidsUsingDevicePort: %s', pids)
+ return pids
+
+ def FileExistsOnDevice(self, file_name):
+ """Checks whether the given file exists on the device.
+
+ Args:
+ file_name: Full path of file to check.
+
+ Returns:
+ True if the file exists, False otherwise.
+ """
+ assert '"' not in file_name, 'file_name cannot contain double quotes'
+ status = self._adb.SendShellCommand(
+ '\'test -e "%s"; echo $?\'' % (file_name))
+ if 'test: not found' not in status:
+ return int(status) == 0
+
+ status = self._adb.SendShellCommand(
+ '\'ls "%s" >/dev/null 2>&1; echo $?\'' % (file_name))
+ return int(status) == 0
+
+
class NewLineNormalizer(object):
  """A file-like object to normalize EOLs to '\n'.

  Pexpect runs adb within a pseudo-tty device (see
  http://www.noah.org/wiki/pexpect), so any '\n' printed by adb is written
  as '\r\n' to the logfile. Since adb already uses '\r\n' to terminate
  lines, the log ends up having '\r\r\n' at the end of each line. This
  filter replaces the above with a single '\n' in the data stream.
  """

  def __init__(self, output):
    self._output = output

  def write(self, data):
    # Collapse the pseudo-tty triple line ending before forwarding.
    self._output.write(data.replace('\r\r\n', '\n'))

  def flush(self):
    self._output.flush()
+
diff --git a/media/webrtc/trunk/build/android/pylib/apk_info.py b/media/webrtc/trunk/build/android/pylib/apk_info.py
new file mode 100644
index 000000000..7e8867570
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/apk_info.py
@@ -0,0 +1,142 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Gathers information about APKs."""
+
+import collections
+import os
+import re
+
+import cmd_helper
+
+
class ApkInfo(object):
  """Helper class for inspecting APKs."""
  # Locate proguard: prefer the SDK copy, fall back to the AOSP source tree.
  # NOTE(review): this runs at class-definition (module import) time and
  # raises KeyError if ANDROID_SDK_ROOT is unset (and, on the fallback path,
  # if ANDROID_BUILD_TOP is unset) -- confirm all importers set them.
  _PROGUARD_PATH = os.path.join(os.environ['ANDROID_SDK_ROOT'],
                                'tools/proguard/bin/proguard.sh')
  if not os.path.exists(_PROGUARD_PATH):
    _PROGUARD_PATH = os.path.join(os.environ['ANDROID_BUILD_TOP'],
                                  'external/proguard/bin/proguard.sh')
  # Patterns matching the relevant lines of 'proguard -dump' output.
  _PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
  _PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
  _PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
  _PROGUARD_ANNOTATION_CONST_RE = re.compile(r'\s*?- Constant element value.*$')
  _PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
  # Matches the package line of 'aapt dump badging' output.
  _AAPT_PACKAGE_NAME_RE = re.compile(r'package: .*name=\'(\S*)\'')

  def __init__(self, apk_path, jar_path):
    """Inspects the given apk/jar pair.

    Args:
      apk_path: Path to the built .apk; must already exist.
      jar_path: Path to the matching .jar handed to proguard for dumping.

    Raises:
      Exception: if either path does not exist.
    """
    if not os.path.exists(apk_path):
      raise Exception('%s not found, please build it' % apk_path)
    self._apk_path = apk_path
    if not os.path.exists(jar_path):
      raise Exception('%s not found, please build it' % jar_path)
    self._jar_path = jar_path
    # Maps 'Class#method' -> list of annotation names / 'name:value' strings.
    self._annotation_map = collections.defaultdict(list)
    self._test_methods = []
    self._Initialize()

  def _Initialize(self):
    """Parses a proguard dump of the jar to collect tests and annotations."""
    proguard_output = cmd_helper.GetCmdOutput([self._PROGUARD_PATH,
                                               '-injars', self._jar_path,
                                               '-dontshrink',
                                               '-dontoptimize',
                                               '-dontobfuscate',
                                               '-dontpreverify',
                                               '-dump',
                                              ]).split('\n')
    clazz = None
    method = None
    annotation = None
    has_value = False
    qualified_method = None
    # Line-oriented state machine over the dump: first a class line, then its
    # method lines, then annotation (and annotation value) lines per method.
    for line in proguard_output:
      m = self._PROGUARD_CLASS_RE.match(line)
      if m:
        clazz = m.group(1).replace('/', '.')  # Change package delim.
        annotation = None
        continue
      m = self._PROGUARD_METHOD_RE.match(line)
      if m:
        method = m.group(1)
        annotation = None
        qualified_method = clazz + '#' + method
        # Test discovery convention: 'test*' methods on '*Test' classes.
        if method.startswith('test') and clazz.endswith('Test'):
          self._test_methods += [qualified_method]
        continue
      m = self._PROGUARD_ANNOTATION_RE.match(line)
      if m:
        assert qualified_method
        annotation = m.group(1).split('/')[-1]  # Ignore the annotation package.
        self._annotation_map[qualified_method].append(annotation)
        has_value = False
        continue
      if annotation:
        assert qualified_method
        # A valued annotation appears as a 'Constant element value' line
        # followed by the value line; record it as 'annotation:value'.
        if not has_value:
          m = self._PROGUARD_ANNOTATION_CONST_RE.match(line)
          if m:
            has_value = True
        else:
          m = self._PROGUARD_ANNOTATION_VALUE_RE.match(line)
          if m:
            value = m.group(1)
            self._annotation_map[qualified_method].append(
                annotation + ':' + value)
            has_value = False

  def _GetAnnotationMap(self):
    """Returns the 'Class#method' -> annotations map built at init time."""
    return self._annotation_map

  def _IsTestMethod(self, test):
    """True if 'Class#method' follows the '*Test'/'test*' naming convention."""
    class_name, method = test.split('#')
    return class_name.endswith('Test') and method.startswith('test')

  def GetApkPath(self):
    """Returns the path to the inspected .apk."""
    return self._apk_path

  def GetPackageName(self):
    """Returns the package name of this APK."""
    aapt_output = cmd_helper.GetCmdOutput(
        ['aapt', 'dump', 'badging', self._apk_path]).split('\n')
    for line in aapt_output:
      m = self._AAPT_PACKAGE_NAME_RE.match(line)
      if m:
        return m.group(1)
    raise Exception('Failed to determine package name of %s' % self._apk_path)

  def GetTestAnnotations(self, test):
    """Returns a list of all annotations for the given |test|. May be empty."""
    if not self._IsTestMethod(test):
      return []
    return self._GetAnnotationMap()[test]

  def _AnnotationsMatchFilters(self, annotation_filter_list, annotations):
    """Checks if annotations match any of the filters."""
    if not annotation_filter_list:
      return True
    for annotation_filter in annotation_filter_list:
      filters = annotation_filter.split('=')
      # 'key=v1,v2' form: match if any 'key:value' is present; a bare name
      # matches the annotation name directly.
      if len(filters) == 2:
        key = filters[0]
        value_list = filters[1].split(',')
        for value in value_list:
          if key + ':' + value in annotations:
            return True
      elif annotation_filter in annotations:
        return True
    return False

  def GetAnnotatedTests(self, annotation_filter_list):
    """Returns a list of all tests that match the given annotation filters."""
    # NOTE(review): iteritems() is Python-2-only, consistent with the rest of
    # this file (e.g. the print statements elsewhere).
    return [test for test, annotations in self._GetAnnotationMap().iteritems()
            if self._IsTestMethod(test) and self._AnnotationsMatchFilters(
                annotation_filter_list, annotations)]

  def GetTestMethods(self):
    """Returns a list of all test methods in this apk as Class#testMethod."""
    return self._test_methods

  @staticmethod
  def IsPythonDrivenTest(test):
    # Python-driven tests are identified purely by their path containing
    # 'pythonDrivenTests'.
    return 'pythonDrivenTests' in test
diff --git a/media/webrtc/trunk/build/android/pylib/base_test_runner.py b/media/webrtc/trunk/build/android/pylib/base_test_runner.py
new file mode 100644
index 000000000..619bc6edf
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/base_test_runner.py
@@ -0,0 +1,210 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import httplib
+import logging
+import os
+import tempfile
+import time
+
+import android_commands
+import constants
+from chrome_test_server_spawner import SpawningServer
+import constants
+from flag_changer import FlagChanger
+from forwarder import Forwarder
+import lighttpd_server
+import ports
+from valgrind_tools import CreateTool
+
+
# A file on the device used to store the ports of the net test server. The
# format of the file is: test-spawner-server-port:test-server-port
NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
+
+
class BaseTestRunner(object):
  """Base class for running tests on a single device.

  A subclass should implement RunTests() with no parameter, so that calling
  the Run() method will set up tests, run them and tear them down.
  """

  def __init__(self, device, tool, shard_index, build_type):
    """
    Args:
      device: Tests will run on the device of this ID.
      tool: Name of the wrapper tool to run the tests under (passed to
        valgrind_tools.CreateTool).
      shard_index: Index number of the shard on which the test suite will run.
      build_type: 'Release' or 'Debug'.
    """
    self.device = device
    self.adb = android_commands.AndroidCommands(device=device)
    self.tool = CreateTool(tool, self.adb)
    self._http_server = None
    self._forwarder = None
    self._forwarder_device_port = 8000
    self.forwarder_base_url = ('http://localhost:%d' %
        self._forwarder_device_port)
    self.flags = FlagChanger(self.adb)
    self.shard_index = shard_index
    # Skip the first-run experience so it cannot block automated runs.
    self.flags.AddFlags(['--disable-fre'])
    self._spawning_server = None
    self._spawner_forwarder = None
    # We will allocate port for test server spawner when calling method
    # LaunchChromeTestServerSpawner and allocate port for test server when
    # starting it in TestServerThread.
    self.test_server_spawner_port = 0
    self.test_server_port = 0
    self.build_type = build_type

  def _PushTestServerPortInfoToDevice(self):
    """Pushes the latest port information to device."""
    self.adb.SetFileContents(self.adb.GetExternalStorage() + '/' +
                             NET_TEST_SERVER_PORT_INFO_FILE,
                             '%d:%d' % (self.test_server_spawner_port,
                                        self.test_server_port))

  def Run(self):
    """Calls subclass functions to set up tests, run them and tear them down.

    Returns:
      Test results returned from RunTests(), or True when HasTests() reports
      there is nothing to run.
    """
    if not self.HasTests():
      return True
    self.SetUp()
    try:
      return self.RunTests()
    finally:
      # TearDown() runs even if RunTests() raises.
      self.TearDown()

  def SetUp(self):
    """Called before tests run."""
    pass

  def HasTests(self):
    """Whether the test suite has tests to run."""
    return True

  def RunTests(self):
    """Runs the tests. Need to be overridden."""
    raise NotImplementedError

  def TearDown(self):
    """Called when tests finish running."""
    self.ShutdownHelperToolsForTestSuite()

  def CopyTestData(self, test_data_paths, dest_dir):
    """Copies |test_data_paths| list of files/directories to |dest_dir|.

    Args:
      test_data_paths: A list of files or directories relative to |dest_dir|
          which should be copied to the device. The paths must exist in
          |CHROME_DIR|.
      dest_dir: Absolute path to copy to on the device.
    """
    for p in test_data_paths:
      self.adb.PushIfNeeded(
          os.path.join(constants.CHROME_DIR, p),
          os.path.join(dest_dir, p))

  def LaunchTestHttpServer(self, document_root, port=None,
                           extra_config_contents=None):
    """Launches an HTTP server to serve HTTP tests.

    Args:
      document_root: Document root of the HTTP server.
      port: port on which we want to the http server bind.
      extra_config_contents: Extra config contents for the HTTP server.

    Returns:
      A (device port, host port) tuple for the running server.
    """
    self._http_server = lighttpd_server.LighttpdServer(
        document_root, port=port, extra_config_contents=extra_config_contents)
    if self._http_server.StartupHttpServer():
      logging.info('http server started: http://localhost:%s',
                   self._http_server.port)
    else:
      logging.critical('Failed to start http server')
    self.StartForwarderForHttpServer()
    return (self._forwarder_device_port, self._http_server.port)

  def StartForwarder(self, port_pairs):
    """Starts TCP traffic forwarding for the given |port_pairs|.

    Args:
      port_pairs: A list of (device_port, local_port) tuples to forward.
    """
    # Close any forwarder from a previous call rather than leaking it.
    if self._forwarder:
      self._forwarder.Close()
    self._forwarder = Forwarder(
        self.adb, port_pairs, self.tool, '127.0.0.1', self.build_type)

  def StartForwarderForHttpServer(self):
    """Starts a forwarder for the HTTP server.

    The forwarder forwards HTTP requests and responses between host and device.
    """
    self.StartForwarder([(self._forwarder_device_port, self._http_server.port)])

  def RestartHttpServerForwarderIfNecessary(self):
    """Restarts the forwarder if it's not open."""
    # Checks to see if the http server port is being used. If not forwards the
    # request.
    # TODO(dtrainor): This is not always reliable because sometimes the port
    # will be left open even after the forwarder has been killed.
    if not ports.IsDevicePortUsed(self.adb,
        self._forwarder_device_port):
      self.StartForwarderForHttpServer()

  def ShutdownHelperToolsForTestSuite(self):
    """Shuts down the server and the forwarder."""
    # Forwarders should be killed before the actual servers they're forwarding
    # to as they are clients potentially with open connections and to allow for
    # proper hand-shake/shutdown.
    if self._forwarder or self._spawner_forwarder:
      # Kill all forwarders on the device and then kill the process on the host
      # (if it exists)
      self.adb.KillAll('device_forwarder')
      if self._forwarder:
        self._forwarder.Close()
      if self._spawner_forwarder:
        self._spawner_forwarder.Close()
    if self._http_server:
      self._http_server.ShutdownHttpServer()
    if self._spawning_server:
      self._spawning_server.Stop()
    # Undo the --disable-fre (and any other) flag changes made in __init__.
    self.flags.Restore()

  def LaunchChromeTestServerSpawner(self):
    """Launches test server spawner, retrying up to 3 times on failure."""
    server_ready = False
    error_msgs = []
    # Try 3 times to launch test spawner server.
    for i in xrange(0, 3):
      # Do not allocate port for test server here. We will allocate
      # different port for individual test in TestServerThread.
      self.test_server_spawner_port = ports.AllocateTestServerPort()
      self._spawning_server = SpawningServer(self.test_server_spawner_port,
                                             self.adb,
                                             self.tool,
                                             self.build_type)
      self._spawning_server.Start()
      server_ready, error_msg = ports.IsHttpServerConnectable(
          '127.0.0.1', self.test_server_spawner_port, path='/ping',
          expected_read='ready')
      if server_ready:
        break
      else:
        error_msgs.append(error_msg)
        self._spawning_server.Stop()
        # Wait for 2 seconds then restart.
        time.sleep(2)
    if not server_ready:
      logging.error(';'.join(error_msgs))
      raise Exception('Can not start the test spawner server.')
    # Tell the device which ports the spawner/test servers will appear on.
    self._PushTestServerPortInfoToDevice()
    self._spawner_forwarder = Forwarder(
        self.adb,
        [(self.test_server_spawner_port, self.test_server_spawner_port)],
        self.tool, '127.0.0.1', self.build_type)
diff --git a/media/webrtc/trunk/build/android/pylib/base_test_sharder.py b/media/webrtc/trunk/build/android/pylib/base_test_sharder.py
new file mode 100644
index 000000000..48206c202
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/base_test_sharder.py
@@ -0,0 +1,113 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import multiprocessing
+
+from test_result import TestResults
+
+
+def _ShardedTestRunnable(test):
+ """Standalone function needed by multiprocessing.Pool."""
+ log_format = '[' + test.device + '] # %(asctime)-15s: %(message)s'
+ if logging.getLogger().handlers:
+ logging.getLogger().handlers[0].setFormatter(logging.Formatter(log_format))
+ else:
+ logging.basicConfig(format=log_format)
+ # Handle SystemExit here since python has a bug to exit current process
+ try:
+ return test.Run()
+ except SystemExit:
+ return TestResults()
+
def SetTestsContainer(tests_container):
  """Stashes |tests_container| as a per-process class attribute.

  A multiprocessing.Queue can't be pickled across processes, so each pool
  worker receives it through this initializer and stores it as a 'global'
  on BaseTestSharder.
  """
  BaseTestSharder.tests_container = tests_container
+
+
class BaseTestSharder(object):
  """Base class for sharding tests across multiple devices.

  Args:
    attached_devices: A list of attached devices.
  """
  # See more in SetTestsContainer.
  tests_container = None

  def __init__(self, attached_devices):
    """Remembers the device pool; subclasses populate self.tests/retries."""
    self.attached_devices = attached_devices
    # Number of passes over the (remaining) tests; see RunShardedTests().
    self.retries = 1
    self.tests = []

  def CreateShardedTestRunner(self, device, index):
    """Factory function to create a suite-specific test runner.

    Args:
      device: Device serial where this shard will run
      index: Index of this device in the pool.

    Returns:
      An object of BaseTestRunner type (that can provide a "Run()" method).
    """
    pass

  def SetupSharding(self, tests):
    """Called before starting the shards."""
    pass

  def OnTestsCompleted(self, test_runners, test_results):
    """Notifies that we completed the tests."""
    pass

  def RunShardedTests(self):
    """Runs the tests in all connected devices.

    Returns:
      A TestResults object.
    """
    logging.warning('*' * 80)
    logging.warning('Sharding in ' + str(len(self.attached_devices)) +
                    ' devices.')
    logging.warning('Note that the output is not synchronized.')
    logging.warning('Look for the "Final result" banner in the end.')
    logging.warning('*' * 80)
    final_results = TestResults()
    for retry in xrange(self.retries):
      logging.warning('Try %d of %d', retry + 1, self.retries)
      self.SetupSharding(self.tests)
      test_runners = []
      # One runner per attached device; each runs in its own pool worker.
      for index, device in enumerate(self.attached_devices):
        logging.warning('*' * 80)
        logging.warning('Creating shard %d for %s', index, device)
        logging.warning('*' * 80)
        test_runner = self.CreateShardedTestRunner(device, index)
        test_runners += [test_runner]
      logging.warning('Starting...')
      pool = multiprocessing.Pool(len(self.attached_devices),
                                  SetTestsContainer,
                                  [BaseTestSharder.tests_container])
      # map can't handle KeyboardInterrupt exception. It's a python bug.
      # So use map_async instead.
      async_results = pool.map_async(_ShardedTestRunnable, test_runners)
      results_lists = async_results.get(999999)
      test_results = TestResults.FromTestResults(results_lists)
      if retry == self.retries - 1:
        # Final attempt: fold in the passes accumulated during earlier
        # retries so the final tally includes them.
        all_passed = final_results.ok + test_results.ok
        final_results = test_results
        final_results.ok = all_passed
        break
      else:
        # Keep the passes; re-run only the broken tests on the next retry.
        final_results.ok += test_results.ok
        self.tests = []
        for t in test_results.GetAllBroken():
          self.tests += [t.name]
        if not self.tests:
          break
    self.OnTestsCompleted(test_runners, final_results)
    return final_results
diff --git a/media/webrtc/trunk/build/android/pylib/buildbot_report.py b/media/webrtc/trunk/build/android/pylib/buildbot_report.py
new file mode 100644
index 000000000..fe3fcd638
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/buildbot_report.py
@@ -0,0 +1,46 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to print buildbot messages."""
+
def PrintLink(label, url):
  """Adds a link with name |label| linking to |url| to current buildbot step.

  Args:
    label: A string with the name of the label.
    url: A string of the URL.
  """
  # Parenthesized form prints identically under Python 2 and is valid Python 3.
  print('@@@STEP_LINK@%s@%s@@@' % (label, url))
+
+
def PrintMsg(msg):
  """Appends |msg| to the current buildbot step text.

  Args:
    msg: String to be appended.
  """
  # Parenthesized form prints identically under Python 2 and is valid Python 3.
  print('@@@STEP_TEXT@%s@@@' % msg)
+
+
def PrintSummaryText(msg):
  """Appends |msg| to main build summary. Visible from waterfall.

  Args:
    msg: String to be appended.
  """
  # Parenthesized form prints identically under Python 2 and is valid Python 3.
  print('@@@STEP_SUMMARY_TEXT@%s@@@' % msg)
+
+
def PrintError():
  """Marks the current step as failed."""
  # Parenthesized form prints identically under Python 2 and is valid Python 3.
  print('@@@STEP_FAILURE@@@')
+
+
def PrintWarning():
  """Marks the current step with a warning."""
  # Parenthesized form prints identically under Python 2 and is valid Python 3.
  print('@@@STEP_WARNINGS@@@')
+
+
def PrintNamedStep(step):
  """Starts a new buildbot step named |step|.

  Args:
    step: String name of the step.
  """
  # Parenthesized form prints identically under Python 2 and is valid Python 3.
  print('@@@BUILD_STEP %s@@@' % step)
diff --git a/media/webrtc/trunk/build/android/pylib/chrome_test_server_spawner.py b/media/webrtc/trunk/build/android/pylib/chrome_test_server_spawner.py
new file mode 100644
index 000000000..512a6091d
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/chrome_test_server_spawner.py
@@ -0,0 +1,402 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import threading
+import time
+import urlparse
+
+import constants
+from forwarder import Forwarder
+import ports
+
+
# Paths that are needed to import necessary modules when running testserver.py.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + ':%s:%s:%s:%s' % (
    os.path.join(constants.CHROME_DIR, 'third_party'),
    os.path.join(constants.CHROME_DIR, 'third_party', 'tlslite'),
    os.path.join(constants.CHROME_DIR, 'third_party', 'pyftpdlib', 'src'),
    os.path.join(constants.CHROME_DIR, 'net', 'tools', 'testserver'))


# Maps a server type name to the testserver.py flag that selects it
# (empty string means the default, plain http).
SERVER_TYPES = {
    'http': '',
    'ftp': '-f',
    'sync': '--sync',
    'tcpecho': '--tcp-echo',
    'udpecho': '--udp-echo',
}


# The timeout (in seconds) of starting up the Python test server.
TEST_SERVER_STARTUP_TIMEOUT = 10
+
+
def _CheckPortStatus(port, expected_status):
  """Polls until |port|'s in-use status matches |expected_status|.

  Args:
    port: the port number.
    expected_status: boolean of expected status.

  Returns:
    True once the status matches; False after every poll has timed out.
  """
  # Back off 1s, 2s, 3s, 4s between polls (same schedule as range(1, 5)).
  for delay in (1, 2, 3, 4):
    if ports.IsHostPortUsed(port) == expected_status:
      return True
    time.sleep(delay)
  return False
+
+
def _GetServerTypeCommandLine(server_type):
  """Maps |server_type| to the testserver.py flag that selects it.

  Args:
    server_type: the server type to be used (e.g. 'http').

  Returns:
    A string containing the command-line argument.
  """
  try:
    type_flag = SERVER_TYPES[server_type]
  except KeyError:
    raise NotImplementedError('Unknown server type: %s' % server_type)
  if server_type == 'udpecho':
    raise Exception('Please do not run UDP echo tests because we do not have '
                    'a UDP forwarder tool.')
  return type_flag
+
+
class TestServerThread(threading.Thread):
  """A thread to run the test server in a separate process."""

  def __init__(self, ready_event, arguments, adb, tool, build_type):
    """Initialize TestServerThread with the following argument.

    Args:
      ready_event: event which will be set when the test server is ready.
      arguments: dictionary of arguments to run the test server.
      adb: instance of AndroidCommands.
      tool: instance of runtime error detection tool.
      build_type: 'Release' or 'Debug'.
    """
    threading.Thread.__init__(self)
    self.wait_event = threading.Event()
    self.stop_flag = False
    self.ready_event = ready_event
    self.ready_event.clear()
    self.arguments = arguments
    self.adb = adb
    self.tool = tool
    self.test_server_process = None
    # run() stores the subprocess handle in self.process. Initialize it here
    # so Stop() is safe to call even when the thread never ran: previously
    # only the unused test_server_process attribute was initialized, so an
    # early Stop() raised AttributeError on self.process.
    self.process = None
    self.is_ready = False
    self.host_port = self.arguments['port']
    assert isinstance(self.host_port, int)
    self._test_server_forwarder = None
    # The forwarder device port now is dynamically allocated.
    self.forwarder_device_port = 0
    # Anonymous pipe in order to get port info from test server.
    self.pipe_in = None
    self.pipe_out = None
    self.command_line = []
    self.build_type = build_type

  def _WaitToStartAndGetPortFromTestServer(self):
    """Waits for the Python test server to start and gets the port it is using.

    The port information is passed by the Python test server with a pipe given
    by self.pipe_out. It is written as a result to |self.host_port|.

    Returns:
      Whether the port used by the test server was successfully fetched.
    """
    assert self.host_port == 0 and self.pipe_out and self.pipe_in
    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
                                   TEST_SERVER_STARTUP_TIMEOUT)
    if len(in_fds) == 0:
      logging.error('Failed to wait to the Python test server to be started.')
      return False
    # First read the data length as an unsigned 4-byte value. This
    # is _not_ using network byte ordering since the Python test server packs
    # size as native byte order and all Chromium platforms so far are
    # configured to use little-endian.
    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
    # use a unified byte order (either big-endian or little-endian).
    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
    if data_length:
      (data_length,) = struct.unpack('=L', data_length)
      assert data_length
    if not data_length:
      logging.error('Failed to get length of server data.')
      return False
    port_json = os.read(self.pipe_in, data_length)
    if not port_json:
      logging.error('Failed to get server data.')
      return False
    logging.info('Got port json data: %s', port_json)
    port_json = json.loads(port_json)
    # 'in' instead of the Python-2-only dict.has_key(); same semantics.
    if 'port' in port_json and isinstance(port_json['port'], int):
      self.host_port = port_json['port']
      return _CheckPortStatus(self.host_port, True)
    logging.error('Failed to get port information from the server data.')
    return False

  def _GenerateCommandLineArguments(self):
    """Generates the command line to run the test server.

    Note that all options are processed by following the definitions in
    testserver.py.
    """
    if self.command_line:
      return
    # The following arguments must exist.
    type_cmd = _GetServerTypeCommandLine(self.arguments['server-type'])
    if type_cmd:
      self.command_line.append(type_cmd)
    self.command_line.append('--port=%d' % self.host_port)
    # Use a pipe to get the port given by the instance of Python test server
    # if the test does not specify the port.
    if self.host_port == 0:
      (self.pipe_in, self.pipe_out) = os.pipe()
      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
    self.command_line.append('--host=%s' % self.arguments['host'])
    data_dir = self.arguments['data-dir'] or 'chrome/test/data'
    if not os.path.isabs(data_dir):
      data_dir = os.path.join(constants.CHROME_DIR, data_dir)
    self.command_line.append('--data-dir=%s' % data_dir)
    # The following arguments are optional depending on the individual test.
    # ('in' replaces the Python-2-only dict.has_key(); same semantics.)
    if 'log-to-console' in self.arguments:
      self.command_line.append('--log-to-console')
    if 'auth-token' in self.arguments:
      self.command_line.append('--auth-token=%s' % self.arguments['auth-token'])
    if 'https' in self.arguments:
      self.command_line.append('--https')
    if 'cert-and-key-file' in self.arguments:
      self.command_line.append('--cert-and-key-file=%s' % os.path.join(
          constants.CHROME_DIR, self.arguments['cert-and-key-file']))
    if 'ocsp' in self.arguments:
      self.command_line.append('--ocsp=%s' % self.arguments['ocsp'])
    if 'https-record-resume' in self.arguments:
      self.command_line.append('--https-record-resume')
    if 'ssl-client-auth' in self.arguments:
      self.command_line.append('--ssl-client-auth')
    if 'tls-intolerant' in self.arguments:
      self.command_line.append('--tls-intolerant=%s' %
                               self.arguments['tls-intolerant'])
    if 'ssl-client-ca' in self.arguments:
      for ca in self.arguments['ssl-client-ca']:
        self.command_line.append('--ssl-client-ca=%s' %
                                 os.path.join(constants.CHROME_DIR, ca))
    if 'ssl-bulk-cipher' in self.arguments:
      for bulk_cipher in self.arguments['ssl-bulk-cipher']:
        self.command_line.append('--ssl-bulk-cipher=%s' % bulk_cipher)

  def run(self):
    """Thread body: launches testserver.py plus a device forwarder, then
    idles until Stop() sets stop_flag, and finally cleans everything up."""
    logging.info('Start running the thread!')
    self.wait_event.clear()
    self._GenerateCommandLineArguments()
    command = [os.path.join(constants.CHROME_DIR, 'net', 'tools',
                            'testserver', 'testserver.py')] + self.command_line
    logging.info('Running: %s', command)
    self.process = subprocess.Popen(command)
    if self.process:
      if self.pipe_out:
        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
      else:
        self.is_ready = _CheckPortStatus(self.host_port, True)
    if self.is_ready:
      self._test_server_forwarder = Forwarder(
          self.adb, [(0, self.host_port)], self.tool, '127.0.0.1',
          self.build_type)
      # Check whether the forwarder is ready on the device.
      self.is_ready = False
      device_port = self._test_server_forwarder.DevicePortForHostPort(
          self.host_port)
      if device_port:
        for timeout in range(1, 5):
          if ports.IsDevicePortUsed(self.adb, device_port, 'LISTEN'):
            self.is_ready = True
            self.forwarder_device_port = device_port
            break
          time.sleep(timeout)
    # Wake up the request handler thread.
    self.ready_event.set()
    # Keep thread running until Stop() gets called.
    while not self.stop_flag:
      time.sleep(1)
    if self.process.poll() is None:
      self.process.kill()
    if self._test_server_forwarder:
      self._test_server_forwarder.Close()
    self.process = None
    self.is_ready = False
    if self.pipe_out:
      os.close(self.pipe_in)
      os.close(self.pipe_out)
      self.pipe_in = None
      self.pipe_out = None
    logging.info('Test-server has died.')
    self.wait_event.set()

  def Stop(self):
    """Blocks until the loop has finished.

    Note that this must be called in another thread.
    """
    # No-op when the server process was never started (or already reaped).
    if not self.process:
      return
    self.stop_flag = True
    self.wait_event.wait()
+
+
class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """A handler used to process http GET/POST request."""

  def _SendResponse(self, response_code, response_reason, additional_headers,
                    contents):
    """Generates a response sent to the client from the provided parameters.

    Args:
      response_code: number of the response status.
      response_reason: string of reason description of the response.
      additional_headers: dict of additional headers. Each key is the name of
                          the header, each value is the content of the header.
      contents: string of the contents we want to send to client.
    """
    self.send_response(response_code, response_reason)
    self.send_header('Content-Type', 'text/html')
    # Specify the content-length as without it the http(s) response will not
    # be completed properly (and the browser keeps expecting data).
    self.send_header('Content-Length', len(contents))
    for header_name in additional_headers:
      self.send_header(header_name, additional_headers[header_name])
    self.end_headers()
    self.wfile.write(contents)
    self.wfile.flush()

  def _StartTestServer(self):
    """Starts the test server thread."""
    logging.info('Handling request to spawn a test server.')
    content_type = self.headers.getheader('content-type')
    if content_type != 'application/json':
      raise Exception('Bad content-type for start request.')
    content_length = self.headers.getheader('content-length')
    if not content_length:
      content_length = 0
    try:
      content_length = int(content_length)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:', which would also have intercepted
      # KeyboardInterrupt/SystemExit; only conversion failures are expected.
      raise Exception('Bad content-length for start request.')
    logging.info(content_length)
    test_server_argument_json = self.rfile.read(content_length)
    logging.info(test_server_argument_json)
    # There should only ever be one test server at a time.
    assert not self.server.test_server_instance
    ready_event = threading.Event()
    self.server.test_server_instance = TestServerThread(
        ready_event,
        json.loads(test_server_argument_json),
        self.server.adb,
        self.server.tool,
        self.server.build_type)
    self.server.test_server_instance.setDaemon(True)
    self.server.test_server_instance.start()
    # Block until the server thread reports readiness (or failure).
    ready_event.wait()
    if self.server.test_server_instance.is_ready:
      self._SendResponse(200, 'OK', {}, json.dumps(
          {'port': self.server.test_server_instance.forwarder_device_port,
           'message': 'started'}))
      logging.info('Test server is running on port: %d.',
                   self.server.test_server_instance.host_port)
    else:
      self.server.test_server_instance.Stop()
      self.server.test_server_instance = None
      self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encounter problem during starting a test server.')

  def _KillTestServer(self):
    """Stops the test server instance."""
    # There should only ever be one test server at a time. This may do the
    # wrong thing if we try and start multiple test servers.
    if not self.server.test_server_instance:
      return
    port = self.server.test_server_instance.host_port
    logging.info('Handling request to kill a test server on port: %d.', port)
    self.server.test_server_instance.Stop()
    # Make sure the status of test server is correct before sending response.
    if _CheckPortStatus(port, False):
      self._SendResponse(200, 'OK', {}, 'killed')
      logging.info('Test server on port %d is killed', port)
    else:
      self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encounter problem during killing a test server.')
    self.server.test_server_instance = None

  def do_POST(self):
    """Dispatches POST requests; only '/start' is supported."""
    parsed_path = urlparse.urlparse(self.path)
    action = parsed_path.path
    logging.info('Action for POST method is: %s.', action)
    if action == '/start':
      self._StartTestServer()
    else:
      self._SendResponse(400, 'Unknown request.', {}, '')
      logging.info('Encounter unknown request: %s.', action)

  def do_GET(self):
    """Dispatches GET requests: '/kill' and '/ping' are supported."""
    parsed_path = urlparse.urlparse(self.path)
    action = parsed_path.path
    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
    logging.info('Action for GET method is: %s.', action)
    for param in params:
      logging.info('%s=%s', param, params[param][0])
    if action == '/kill':
      self._KillTestServer()
    elif action == '/ping':
      # The ping handler is used to check whether the spawner server is ready
      # to serve the requests. We don't need to test the status of the test
      # server when handling ping request.
      self._SendResponse(200, 'OK', {}, 'ready')
      logging.info('Handled ping request and sent response.')
    else:
      self._SendResponse(400, 'Unknown request', {}, '')
      logging.info('Encounter unknown request: %s.', action)
+
+
class SpawningServer(object):
  """Starts and stops the HTTP server that spawns per-test test servers."""

  def __init__(self, test_server_spawner_port, adb, tool, build_type):
    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
    self.port = test_server_spawner_port
    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
                                            SpawningServerRequestHandler)
    # Stash the device context on the server object so that the request
    # handler can reach it through self.server.
    self.server.adb = adb
    self.server.tool = tool
    self.server.build_type = build_type
    self.server.test_server_instance = None

  def _Listen(self):
    logging.info('Starting test server spawner')
    self.server.serve_forever()

  def Start(self):
    """Serves requests on a daemon thread so Stop() can end the process."""
    spawner_thread = threading.Thread(target=self._Listen)
    spawner_thread.setDaemon(True)
    spawner_thread.start()
    # Give the listener a moment to come up before callers probe it.
    time.sleep(1)

  def Stop(self):
    """Stops any running test server, then shuts the spawner down."""
    if self.server.test_server_instance:
      self.server.test_server_instance.Stop()
    self.server.shutdown()
diff --git a/media/webrtc/trunk/build/android/pylib/cmd_helper.py b/media/webrtc/trunk/build/android/pylib/cmd_helper.py
new file mode 100644
index 000000000..8b5013090
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/cmd_helper.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A wrapper for subprocess to make calling shell commands easier."""
+
+
+import logging
+import subprocess
+
+
def RunCmd(args, cwd=None):
  """Executes a program and blocks until it exits.

  Args:
    args: A string or a sequence of program arguments. The program to execute
      is the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.

  Returns:
    Return code from the command execution.
  """
  logging.info(str(args) + ' ' + (cwd or ''))
  # subprocess.call is exactly Popen(...).wait().
  return subprocess.call(args, cwd=cwd)
+
+
def GetCmdOutput(args, cwd=None, shell=False):
  """Executes a program and captures its standard output.

  Args:
    args: A string or a sequence of program arguments. The program to execute
      is the string or the first item in the args sequence.
    cwd: If not None, the subprocess's current directory will be changed to
      |cwd| before it's executed.
    shell: Whether to execute args as a shell command.

  Returns:
    The command's stdout. Anything written to stderr is routed to the logger
    (which defaults to stdout).
  """
  logging.info(str(args) + ' ' + (cwd or ''))
  child = subprocess.Popen(args=args, cwd=cwd, stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE, shell=shell)
  stdout, stderr = child.communicate()
  if stderr:
    logging.critical(stderr)
  # Truncate logged output longer than 4k; the full stdout is still returned.
  logging.info(stdout[:4096])
  return stdout
diff --git a/media/webrtc/trunk/build/android/pylib/constants.py b/media/webrtc/trunk/build/android/pylib/constants.py
new file mode 100644
index 000000000..b340612e2
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/constants.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+import os
+
+
# Package and launch-activity names for the browser builds exercised by the
# test runners.
CHROME_PACKAGE = 'com.google.android.apps.chrome'
CHROME_ACTIVITY = 'com.google.android.apps.chrome.Main'
CHROME_TESTS_PACKAGE = 'com.google.android.apps.chrome.tests'
LEGACY_BROWSER_PACKAGE = 'com.google.android.browser'
LEGACY_BROWSER_ACTIVITY = 'com.android.browser.BrowserActivity'
CONTENT_SHELL_PACKAGE = "org.chromium.content_shell"
CONTENT_SHELL_ACTIVITY = "org.chromium.content_shell.ContentShellActivity"
CHROME_SHELL_PACKAGE = 'org.chromium.chrome.browser.test'
CHROMIUM_TEST_SHELL_PACKAGE = 'org.chromium.chrome.testshell'

# Absolute path of the Chromium source checkout (three levels above this
# file: build/android/pylib -> src).
CHROME_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                          '..', '..', '..'))

# Ports arrangement for various test servers used in Chrome for Android.
# Lighttpd server will attempt to use 9000 as default port, if unavailable it
# will find a free port from 8001 - 8999.
LIGHTTPD_DEFAULT_PORT = 9000
LIGHTTPD_RANDOM_PORT_FIRST = 8001
LIGHTTPD_RANDOM_PORT_LAST = 8999
TEST_SYNC_SERVER_PORT = 9031

# The net test server is started from 10000. Reserve 20000 ports for the all
# test-server based tests should be enough for allocating different port for
# individual test-server based test.
TEST_SERVER_PORT_FIRST = 10000
TEST_SERVER_PORT_LAST = 30000
# A file to record next valid port of test server.
TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'

# Device-side directory where native test executables are pushed.
TEST_EXECUTABLE_DIR = '/data/local/tmp'
# Directories for common java libraries for SDK build.
# These constants are defined in build/android/ant/common.xml
SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
SDK_BUILD_APKS_DIR = 'apks'

# The directory on the device where perf test output gets saved to.
DEVICE_PERF_OUTPUT_DIR = '/data/data/' + CHROME_PACKAGE + '/files'
diff --git a/media/webrtc/trunk/build/android/pylib/debug_info.py b/media/webrtc/trunk/build/android/pylib/debug_info.py
new file mode 100644
index 000000000..6f0f55a33
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/debug_info.py
@@ -0,0 +1,196 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Collect debug info for a test."""
+
+import datetime
+import logging
+import os
+import re
+import shutil
+import string
+import subprocess
+import tempfile
+
+import cmd_helper
+
+
+TOMBSTONE_DIR = '/data/tombstones/'
+
+
+class GTestDebugInfo(object):
+  """A helper class to collect related debug information for a gtest.
+
+  Debug info is collected in two steps:
+  - first, object(s) of this class (one per device) accumulate logs
+    and screenshots in tempdir.
+  - once the test has finished, call ZipAndCleanResults to create
+    a zip containing the logs from all devices, and clean them up.
+
+  Args:
+    adb: ADB interface the tests are using.
+    device: Serial# of the Android device in which the specified gtest runs.
+    testsuite_name: Name of the specified gtest.
+    gtest_filter: Test filter used by the specified gtest.
+  """
+
+  def __init__(self, adb, device, testsuite_name, gtest_filter):
+    """Initializes the DebugInfo class for a specified gtest."""
+    self.adb = adb
+    self.device = device
+    self.testsuite_name = testsuite_name
+    self.gtest_filter = gtest_filter
+    self.logcat_process = None
+    self.has_storage = False
+    self.log_dir = os.path.join(tempfile.gettempdir(),
+                                'gtest_debug_info',
+                                self.testsuite_name,
+                                self.device)
+    if not os.path.exists(self.log_dir):
+      os.makedirs(self.log_dir)
+    self.log_file_name = os.path.join(self.log_dir,
+                                      self._GeneratePrefixName() + '_log.txt')
+    # Snapshot of the tombstones present before the test; used by
+    # ArchiveNewCrashFiles to detect crashes produced during the run.
+    self.old_crash_files = self._ListCrashFiles()
+
+  def _GetSignatureFromGTestFilter(self):
+    """Gets a signature from gtest_filter.
+
+    Signature is used to identify the tests from which we collect debug
+    information.
+
+    Returns:
+      A signature string. Returns 'all' if there is no gtest filter.
+    """
+    if not self.gtest_filter:
+      return 'all'
+    # Keep only characters that are safe to embed in a file name.
+    filename_chars = "-_()%s%s" % (string.ascii_letters, string.digits)
+    signature = ''.join(c for c in self.gtest_filter if c in filename_chars)
+    if len(signature) > 64:
+      # The signature can't be too long, as it'll be part of a file name.
+      signature = signature[:64]
+    return signature
+
+  def _GeneratePrefixName(self):
+    """Generates a prefix name for debug information of the test.
+
+    The prefix name consists of the following:
+    (1) root name of test_suite_base.
+    (2) device serial number.
+    (3) prefix of filter signature generated from gtest_filter.
+    (4) date & time when calling this method.
+
+    Returns:
+      Name of the log file.
+    """
+    return (os.path.splitext(self.testsuite_name)[0] + '_' + self.device + '_' +
+            self._GetSignatureFromGTestFilter() + '_' +
+            datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M-%S-%f'))
+
+  def StartRecordingLog(self, clear=True, filters=['*:v']):
+    """Starts recording logcat output to a file.
+
+    This call should come before running test, with calling StopRecordingLog
+    following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.  (Shared mutable default;
+        it is only joined into a string below, never mutated, so it is safe.)
+    """
+    self.StopRecordingLog()
+    if clear:
+      cmd_helper.RunCmd(['adb', '-s', self.device, 'logcat', '-c'])
+    logging.info('Start dumping log to %s ...', self.log_file_name)
+    # shell=True is required for the '>' output redirection; the interpolated
+    # values are all generated locally above, not user-controlled.
+    command = 'adb -s %s logcat -v threadtime %s > %s' % (self.device,
+                                                          ' '.join(filters),
+                                                          self.log_file_name)
+    self.logcat_process = subprocess.Popen(command, shell=True)
+
+  def StopRecordingLog(self):
+    """Stops an existing logcat recording subprocess."""
+    if not self.logcat_process:
+      return
+    # poll() is None while the process is still running; 0 is a valid exit
+    # status, so compare against None rather than relying on truthiness.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    self.logcat_process = None
+    logging.info('Finish log dump.')
+
+  def TakeScreenshot(self, identifier_mark):
+    """Takes a screen shot from current specified device.
+
+    Args:
+      identifier_mark: A string to identify the screen shot DebugInfo will
+        take.  It will be part of filename of the screen shot.  Empty string
+        is acceptable.
+
+    Returns:
+      Returns the file name on the host of the screenshot if successful,
+      None otherwise.
+    """
+    assert isinstance(identifier_mark, str)
+    # screenshot2 is an Android SDK host tool located under $ANDROID_HOST_OUT.
+    screenshot_path = os.path.join(os.getenv('ANDROID_HOST_OUT', ''),
+                                   'bin',
+                                   'screenshot2')
+    if not os.path.exists(screenshot_path):
+      logging.error('Failed to take screen shot from device %s', self.device)
+      return None
+    shot_path = os.path.join(self.log_dir, ''.join([self._GeneratePrefixName(),
+                                                    identifier_mark,
+                                                    '_screenshot.png']))
+    # screenshot2 reports 'Success.' on its output when the capture worked.
+    re_success = re.compile(re.escape('Success.'), re.MULTILINE)
+    if re_success.findall(cmd_helper.GetCmdOutput([screenshot_path, '-s',
+                                                   self.device, shot_path])):
+      logging.info('Successfully took a screen shot to %s', shot_path)
+      return shot_path
+    logging.error('Failed to take screen shot from device %s', self.device)
+    return None
+
+  def _ListCrashFiles(self):
+    """Collects crash files from current specified device.
+
+    Returns:
+      A dict of crash files in format {"name": (size, lastmod), ...}.
+    """
+    return self.adb.ListPathContents(TOMBSTONE_DIR)
+
+  def ArchiveNewCrashFiles(self):
+    """Archives the crash files newly generated until calling this method."""
+    current_crash_files = self._ListCrashFiles()
+    files = []
+    for f in current_crash_files:
+      if f not in self.old_crash_files:
+        files += [f]
+      elif current_crash_files[f] != self.old_crash_files[f]:
+        # Tombstones dir can only have maximum 10 files, so we need to compare
+        # size and timestamp information of file if the file exists.
+        files += [f]
+    if files:
+      logging.info('New crash file(s):%s' % ' '.join(files))
+      for f in files:
+        self.adb.Adb().Pull(TOMBSTONE_DIR + f,
+                            os.path.join(self.log_dir, f))
+
+  @staticmethod
+  def ZipAndCleanResults(dest_dir, dump_file_name):
+    """A helper method to zip all debug information results into a dump file.
+
+    Args:
+      dest_dir: Dir path in where we put the dump file.
+      dump_file_name: Desired name of the dump file. This method makes sure
+        '.zip' will be added as ext name.
+    """
+    if not dest_dir or not dump_file_name:
+      return
+    cmd_helper.RunCmd(['mkdir', '-p', dest_dir])
+    log_basename = os.path.basename(dump_file_name)
+    log_zip_file = os.path.join(dest_dir,
+                                os.path.splitext(log_basename)[0] + '.zip')
+    logging.info('Zipping debug dumps into %s ...', log_zip_file)
+    # Add new dumps into the zip file. The zip may exist already if previous
+    # gtest also dumps the debug information. It's OK since we clean up the old
+    # dumps in each build step.
+    log_src_dir = os.path.join(tempfile.gettempdir(), 'gtest_debug_info')
+    cmd_helper.RunCmd(['zip', '-q', '-r', log_zip_file, log_src_dir])
+    assert os.path.exists(log_zip_file)
+    assert os.path.exists(log_src_dir)
+    shutil.rmtree(log_src_dir)
diff --git a/media/webrtc/trunk/build/android/pylib/device_stats_monitor.html b/media/webrtc/trunk/build/android/pylib/device_stats_monitor.html
new file mode 100644
index 000000000..b3abbb0bd
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/device_stats_monitor.html
@@ -0,0 +1,143 @@
+<!DOCTYPE html>
+<!--
+ * Copyright (c) 2012 The Chromium Authors. All rights reserved. Use of this
+ * source code is governed by a BSD-style license that can be found in the
+ * LICENSE file.
+-->
+<html>
+<head>
+  <title>Device Stats Monitor</title>
+  <script type="text/javascript" src="http://www.google.com/jsapi"></script>
+  <style>
+    body {
+      font-family: sans-serif
+    }
+  </style>
+</head>
+<body>
+<h2>Device Stats Monitor</h2>
+<ul>
+<li>Pass path to trace data via the <code>results</code> querystring param.
+<li>Combine charts with the <code>combine</code> querystring param (e.g. <code>&combine=sectors_read,sectors_written</code>).
+<li>Use <code>stacked=true</code> to stack combined charts instead of overlaying (default).
+</ul>
+</body>
+<script>
+google.load("visualization", "1", {packages:["corechart"]});
+
+/**
+ * @returns The querystring param value for |name|, or an empty string if the
+ *     param is absent.
+ */
+function getQuerystringParam(name) {
+  name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]");
+  var regexS = "[\\?&]" + name + "=([^&#]*)";
+  var regex = new RegExp(regexS);
+  var results = regex.exec(window.location.search);
+  if (results == null)
+    return "";
+  else
+    return decodeURIComponent(results[1].replace(/\+/g, " "));
+}
+
+/**
+ * @returns A sorted array of the keys in |obj|.
+ */
+function sortedKeys(obj) {
+  var keys = [];
+  for (var key in obj) {
+    keys.push(key);
+  }
+  keys.sort();
+  return keys;
+}
+
+/**
+ * Removes, by value, every argument from the array (in place).
+ * NOTE: extends Array.prototype; used by display() below.
+ */
+Array.prototype.remove = function() {
+  var what, a = arguments, l = a.length, ax;
+  while (l && this.length) {
+    what = a[--l];
+    while ((ax = this.indexOf(what)) != -1) {
+      this.splice(ax, 1);
+    }
+  }
+  return this;
+}
+
+/**
+ * Displays a new chart.
+ *
+ * @param {Number} hz Number of samples per second of the data.
+ * @param {String} name Name to display on top of chart.
+ * @param {Number[][]} values Array of value arrays to display.
+ * @param {String} units Label for the vertical axis.
+ * @param {Boolean} stacked Whether to display values as stacked.
+ */
+function displayChart(hz, name, values, units, stacked) {
+  var data = new google.visualization.DataTable();
+  data.addColumn('number', 'ms');
+  var names = name.split(',');
+  for (var i = 0; i < names.length; i++) {
+    data.addColumn('number', names[i]);
+  }
+
+  var rows = [];
+  var interval = 1000.0 / hz;
+  for (var i = 0; i < values[0].length; i++) {
+    var row = [i*interval];
+    for (var j = 0; j < values.length; j++) {
+      row.push(values[j][i]);
+    }
+    rows.push(row);
+  }
+  data.addRows(rows);
+
+  var options = {
+    hAxis: {title: 'ms (' + hz + 'hz)'},
+    isStacked: stacked,
+    legend: {position: 'top'},
+    vAxis: {title: units},
+  };
+
+  var elem = document.createElement('DIV');
+  elem.style = 'width:100%;height:500px';
+  document.body.appendChild(elem);
+  var chart = new google.visualization.AreaChart(elem);
+  chart.draw(data, options);
+}
+
+/**
+ * Displays all charts.
+ *
+ * Invoked by the results script. JSONP is used to avoid security
+ * restrictions on XHRs for file:// URLs.
+ */
+function display(hz, results, units) {
+  var combine = getQuerystringParam('combine');
+  var keys = sortedKeys(results);
+  for (var i = 0; i < keys.length; i++) {
+    var key = keys[i];
+    var name = key;
+    var values = [results[key]];
+    var unit = units[key];
+    if (combine.indexOf(key) >= 0) {
+      // All combined series are folded into one chart; keys.remove() below
+      // shrinks the array, so back up the index to avoid skipping a key.
+      i--;
+      name = combine;
+      values = [];
+      var combined_keys = combine.split(',');
+      for (var j = 0; j < combined_keys.length; j++) {
+        values.push(results[combined_keys[j]]);
+        keys.remove(combined_keys[j]);
+      }
+    }
+    displayChart(hz, name, values, unit, !!getQuerystringParam('stacked'));
+  }
+}
+
+var resultsPath = getQuerystringParam('results');
+if (resultsPath)
+  document.write("<script src='" + resultsPath + "'></"+"script>");
+else
+  document.write("Please specify results querystring param.");
+</script>
+</html>
diff --git a/media/webrtc/trunk/build/android/pylib/device_stats_monitor.py b/media/webrtc/trunk/build/android/pylib/device_stats_monitor.py
new file mode 100644
index 000000000..8be4efae5
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/device_stats_monitor.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for iotop/top style profiling for android."""
+
+import collections
+import json
+import os
+import subprocess
+import sys
+import urllib
+
+import constants
+import io_stats_parser
+
+
+class DeviceStatsMonitor(object):
+  """Class for collecting device stats such as IO/CPU usage.
+
+  Args:
+    adb: Instance of AndroidCommands.
+    hz: Frequency at which to sample device stats.
+    build_type: 'Release' or 'Debug'.
+  """
+
+  # On-device path of the monitor binary.
+  DEVICE_PATH = constants.TEST_EXECUTABLE_DIR + '/device_stats_monitor'
+  # On-device path where the monitor writes its profile data.
+  PROFILE_PATH = (constants.DEVICE_PERF_OUTPUT_DIR +
+                  '/device_stats_monitor.profile')
+  # HTML page (checked in next to this module) used to view the results.
+  RESULT_VIEWER_PATH = os.path.abspath(os.path.join(
+      os.path.dirname(os.path.realpath(__file__)), 'device_stats_monitor.html'))
+
+  def __init__(self, adb, hz, build_type):
+    self._adb = adb
+    host_path = os.path.abspath(os.path.join(
+        constants.CHROME_DIR, 'out', build_type, 'device_stats_monitor'))
+    self._adb.PushIfNeeded(host_path, DeviceStatsMonitor.DEVICE_PATH)
+    self._hz = hz
+
+  def Start(self):
+    """Starts device stats monitor on the device."""
+    self._adb.SetFileContents(DeviceStatsMonitor.PROFILE_PATH, '')
+    self._process = subprocess.Popen(
+        ['adb', 'shell', '%s --hz=%d %s' % (
+            DeviceStatsMonitor.DEVICE_PATH, self._hz,
+            DeviceStatsMonitor.PROFILE_PATH)])
+
+  def StopAndCollect(self, output_path):
+    """Stops monitoring and saves results.
+
+    Args:
+      output_path: Path to save results.
+
+    Returns:
+      String of URL to load results in browser.
+    """
+    assert self._process
+    self._adb.KillAll(DeviceStatsMonitor.DEVICE_PATH)
+    self._process.wait()
+    profile = self._adb.GetFileContents(DeviceStatsMonitor.PROFILE_PATH)
+
+    # The profile contains cumulative counters; convert to per-sample deltas.
+    results = collections.defaultdict(list)
+    last_io_stats = None
+    last_cpu_stats = None
+    for line in profile:
+      if ' mmcblk0 ' in line:
+        stats = io_stats_parser.ParseIoStatsLine(line)
+        if last_io_stats:
+          results['sectors_read'].append(stats.num_sectors_read -
+                                         last_io_stats.num_sectors_read)
+          results['sectors_written'].append(stats.num_sectors_written -
+                                            last_io_stats.num_sectors_written)
+        last_io_stats = stats
+      elif line.startswith('cpu '):
+        stats = self._ParseCpuStatsLine(line)
+        if last_cpu_stats:
+          results['user'].append(stats.user - last_cpu_stats.user)
+          results['nice'].append(stats.nice - last_cpu_stats.nice)
+          results['system'].append(stats.system - last_cpu_stats.system)
+          results['idle'].append(stats.idle - last_cpu_stats.idle)
+          results['iowait'].append(stats.iowait - last_cpu_stats.iowait)
+          results['irq'].append(stats.irq - last_cpu_stats.irq)
+          results['softirq'].append(stats.softirq - last_cpu_stats.softirq)
+        last_cpu_stats = stats
+    units = {
+      'sectors_read': 'sectors',
+      'sectors_written': 'sectors',
+      'user': 'jiffies',
+      'nice': 'jiffies',
+      'system': 'jiffies',
+      'idle': 'jiffies',
+      'iowait': 'jiffies',
+      'irq': 'jiffies',
+      'softirq': 'jiffies',
+    }
+    with open(output_path, 'w') as f:
+      # Written as JSONP: the viewer page loads this file and calls display().
+      # NOTE(review): units is interpolated via its repr(); the single-quoted
+      # keys it produces are still valid JavaScript object syntax.
+      f.write('display(%d, %s, %s);' % (self._hz, json.dumps(results), units))
+    return 'file://%s?results=file://%s' % (
+        DeviceStatsMonitor.RESULT_VIEWER_PATH, urllib.quote(output_path))
+
+
+  @staticmethod
+  def _ParseCpuStatsLine(line):
+    """Parses a line of cpu stats into a CpuStats named tuple."""
+    # Field definitions: http://www.linuxhowtos.org/System/procstat.htm
+    cpu_stats = collections.namedtuple('CpuStats',
+                                       ['device',
+                                        'user',
+                                        'nice',
+                                        'system',
+                                        'idle',
+                                        'iowait',
+                                        'irq',
+                                        'softirq',
+                                       ])
+    fields = line.split()
+    # fields[0] is the 'cpu' label; the next seven fields are jiffy counters.
+    return cpu_stats._make([fields[0]] + [int(f) for f in fields[1:8]])
diff --git a/media/webrtc/trunk/build/android/pylib/fake_dns.py b/media/webrtc/trunk/build/android/pylib/fake_dns.py
new file mode 100644
index 000000000..1c6449004
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/fake_dns.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import android_commands
+import constants
+import logging
+import os
+import subprocess
+import time
+
+
+class FakeDns(object):
+  """Wrapper class for the fake_dns tool."""
+  # On-device path of the fake_dns binary.
+  _FAKE_DNS_PATH = constants.TEST_EXECUTABLE_DIR + '/fake_dns'
+
+  def __init__(self, adb, build_type):
+    """
+    Args:
+      adb: the AndroidCommands to use.
+      build_type: 'Release' or 'Debug'.
+    """
+    self._adb = adb
+    self._build_type = build_type
+    self._fake_dns = None
+    self._original_dns = None
+
+  def _PushAndStartFakeDns(self):
+    """Starts the fake_dns server that replies to all name queries with
+    127.0.0.1.
+
+    Returns:
+      subprocess instance connected to the fake_dns process on the device.
+    """
+    self._adb.PushIfNeeded(
+        os.path.join(constants.CHROME_DIR, 'out', self._build_type, 'fake_dns'),
+        FakeDns._FAKE_DNS_PATH)
+    return subprocess.Popen(
+        ['adb', '-s', self._adb._adb.GetSerialNumber(),
+         'shell', '%s -D' % FakeDns._FAKE_DNS_PATH])
+
+  def SetUp(self):
+    """Configures the system to point to a DNS server that replies 127.0.0.1.
+
+    This can be used in combination with the forwarder to forward all web
+    traffic to a replay server.
+
+    The TearDown() method will perform all cleanup.
+    """
+    # NOTE(review): presumably this routes traffic destined for Google public
+    # DNS (8.8.8.x) to loopback so it cannot bypass the fake server — confirm.
+    self._adb.RunShellCommand('ip route add 8.8.8.0/24 via 127.0.0.1 dev lo')
+    self._fake_dns = self._PushAndStartFakeDns()
+    self._original_dns = self._adb.RunShellCommand('getprop net.dns1')[0]
+    self._adb.RunShellCommand('setprop net.dns1 127.0.0.1')
+    time.sleep(2)  # Time for server to start and the setprop to take effect.
+
+  def TearDown(self):
+    """Shuts down the fake_dns."""
+    if self._fake_dns:
+      if not self._original_dns or self._original_dns == '127.0.0.1':
+        # The saved value is missing or points at the fake server itself;
+        # restoring it would break DNS, so fall back to Google DNS.
+        logging.warning('Bad original DNS, falling back to Google DNS.')
+        self._original_dns = '8.8.8.8'
+      self._adb.RunShellCommand('setprop net.dns1 %s' % self._original_dns)
+      self._fake_dns.kill()
+      self._adb.RunShellCommand('ip route del 8.8.8.0/24 via 127.0.0.1 dev lo')
diff --git a/media/webrtc/trunk/build/android/pylib/flag_changer.py b/media/webrtc/trunk/build/android/pylib/flag_changer.py
new file mode 100644
index 000000000..621056156
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/flag_changer.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import constants
+import traceback
+import warnings
+
+
+# Location on the device where chrome reads command line flags from.
+CHROME_COMMAND_FILE = constants.TEST_EXECUTABLE_DIR + '/chrome-command-line'
+
+class FlagChanger(object):
+  """Changes the flags Chrome runs with.
+
+  There are two different use cases for this file:
+  * Flags are permanently set by calling Set().
+  * Flags can be temporarily set for a particular set of unit tests.  These
+    tests should call Restore() to revert the flags to their original state
+    once the tests have completed.
+  """
+
+  def __init__(self, android_cmd):
+    self._android_cmd = android_cmd
+
+    # Save the original flags.
+    self._orig_line = self._android_cmd.GetFileContents(CHROME_COMMAND_FILE)
+    if self._orig_line:
+      self._orig_line = self._orig_line[0].strip()
+
+    # Parse out the flags into a list to facilitate adding and removing flags.
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+
+  def Get(self):
+    """Returns list of current flags."""
+    return self._current_flags
+
+  def Set(self, flags):
+    """Replaces all flags on the current command line with the flags given.
+
+    Args:
+      flags: A list of flags to set, eg. ['--single-process'].
+    """
+    if flags:
+      # The program name ('chrome') is prepended when the file is written;
+      # callers must pass switches only.
+      assert flags[0] != 'chrome'
+
+    self._current_flags = flags
+    self._UpdateCommandLineFile()
+
+  def AddFlags(self, flags):
+    """Appends flags to the command line if they aren't already there.
+
+    Args:
+      flags: A list of flags to add on, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    # Avoid appending flags that are already present.
+    for flag in flags:
+      if flag not in self._current_flags:
+        self._current_flags.append(flag)
+    self._UpdateCommandLineFile()
+
+  def RemoveFlags(self, flags):
+    """Removes flags from the command line, if they exist.
+
+    Args:
+      flags: A list of flags to remove, eg. ['--single-process'].  Note that we
+        expect a complete match when removing flags; if you want to remove
+        a switch with a value, you must use the exact string used to add
+        it in the first place.
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    for flag in flags:
+      if flag in self._current_flags:
+        self._current_flags.remove(flag)
+    self._UpdateCommandLineFile()
+
+  def Restore(self):
+    """Restores the flags to their original state."""
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+    self._UpdateCommandLineFile()
+
+  def _UpdateCommandLineFile(self):
+    """Writes out the command line to the file, or removes it if empty."""
+    print "Current flags: ", self._current_flags
+
+    if self._current_flags:
+      self._android_cmd.SetFileContents(CHROME_COMMAND_FILE,
+                                        'chrome ' +
+                                        ' '.join(self._current_flags))
+    else:
+      self._android_cmd.RunShellCommand('rm ' + CHROME_COMMAND_FILE)
+
+  def _TokenizeFlags(self, line):
+    """Changes the string containing the command line into a list of flags.
+
+    Follows similar logic to CommandLine.java::tokenizeQuotedArguments:
+    * Flags are split using whitespace, unless the whitespace is within a
+      pair of quotation marks.
+    * Unlike the Java version, we keep the quotation marks around switch
+      values since we need them to re-create the file when new flags are
+      appended.
+
+    Args:
+      line: A string containing the entire command line.  The first token is
+        assumed to be the program name.
+    """
+    if not line:
+      return []
+
+    tokenized_flags = []
+    current_flag = ""
+    within_quotations = False
+
+    # Move through the string character by character and build up each flag
+    # along the way.
+    # NOTE(review): the 'is' / 'is not' comparisons against string literals
+    # below rely on CPython small-string interning; '==' / '!=' would be the
+    # correct operators — confirm before changing (matches upstream).
+    for c in line.strip():
+      if c is '"':
+        if len(current_flag) > 0 and current_flag[-1] == '\\':
+          # Last char was a backslash; pop it, and treat this " as a literal.
+          current_flag = current_flag[0:-1] + '"'
+        else:
+          within_quotations = not within_quotations
+        current_flag += c
+      elif not within_quotations and (c is ' ' or c is '\t'):
+        if current_flag is not "":
+          tokenized_flags.append(current_flag)
+        current_flag = ""
+      else:
+        current_flag += c
+
+    # Tack on the last flag.
+    if not current_flag:
+      # NOTE(review): the quote character is always appended to current_flag,
+      # so current_flag is non-empty whenever within_quotations is True; this
+      # warning branch looks unreachable — confirm before relying on it.
+      if within_quotations:
+        warnings.warn("Unterminated quoted string: " + current_flag)
+    else:
+      tokenized_flags.append(current_flag)
+
+    # Return everything but the program name.
+    return tokenized_flags[1:]
diff --git a/media/webrtc/trunk/build/android/pylib/forwarder.py b/media/webrtc/trunk/build/android/pylib/forwarder.py
new file mode 100644
index 000000000..bc41db3ff
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/forwarder.py
@@ -0,0 +1,198 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import sys
+import time
+
+import android_commands
+import cmd_helper
+import constants
+import ports
+
+from pylib import pexpect
+
+class Forwarder(object):
+  """Class to manage port forwards from the device to the host."""
+
+  # On-device path of the forwarder binary.
+  _DEVICE_FORWARDER_PATH = constants.TEST_EXECUTABLE_DIR + '/device_forwarder'
+
+  # Unix Abstract socket path:
+  _DEVICE_ADB_CONTROL_PORT = 'chrome_device_forwarder'
+  _TIMEOUT_SECS = 30
+
+  def __init__(self, adb, port_pairs, tool, host_name, build_type):
+    """Forwards TCP ports on the device back to the host.
+
+    Works like adb forward, but in reverse.
+
+    Args:
+      adb: Instance of AndroidCommands for talking to the device.
+      port_pairs: A list of tuples (device_port, host_port) to forward. Note
+        that you can specify 0 as a device_port, in which case a port will
+        be dynamically assigned on the device. You can get the number of the
+        assigned port using the DevicePortForHostPort method.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+        forwarder (see valgrind_tools.py).
+      host_name: Address to forward to, must be addressable from the
+        host machine. Usually use loopback '127.0.0.1'.
+      build_type: 'Release' or 'Debug'.
+
+    Raises:
+      Exception on failure to forward the port.
+    """
+    self._adb = adb
+    self._host_to_device_port_map = dict()
+    self._host_process = None
+    self._device_process = None
+    self._adb_forward_process = None
+
+    self._host_adb_control_port = ports.AllocateTestServerPort()
+    if not self._host_adb_control_port:
+      raise Exception('Failed to allocate a TCP port in the host machine.')
+    adb.PushIfNeeded(
+        os.path.join(constants.CHROME_DIR, 'out', build_type,
+                     'device_forwarder'),
+        Forwarder._DEVICE_FORWARDER_PATH)
+    self._host_forwarder_path = os.path.join(constants.CHROME_DIR,
+                                             'out',
+                                             build_type,
+                                             'host_forwarder')
+    forward_string = ['%d:%d:%s' %
+                      (device, host, host_name) for device, host in port_pairs]
+    logging.info('Forwarding ports: %s', forward_string)
+    timeout_sec = 5
+    host_pattern = 'host_forwarder.*' + ' '.join(forward_string)
+    # TODO(felipeg): Rather than using a blocking kill() here, the device
+    # forwarder could try to bind the Unix Domain Socket until it succeeds or
+    # while it fails because the socket is already bound (with appropriate
+    # timeout handling obviously).
+    self._KillHostForwarderBlocking(host_pattern, timeout_sec)
+    self._KillDeviceForwarderBlocking(timeout_sec)
+    # Bridge the host TCP control port to the device's abstract Unix socket.
+    self._adb_forward_process = pexpect.spawn(
+        'adb', ['-s',
+                adb._adb.GetSerialNumber(),
+                'forward',
+                'tcp:%s' % self._host_adb_control_port,
+                'localabstract:%s' % Forwarder._DEVICE_ADB_CONTROL_PORT])
+    self._device_process = pexpect.spawn(
+        'adb', ['-s',
+                adb._adb.GetSerialNumber(),
+                'shell',
+                '%s %s -D --adb_sock=%s' % (
+                    tool.GetUtilWrapper(),
+                    Forwarder._DEVICE_FORWARDER_PATH,
+                    Forwarder._DEVICE_ADB_CONTROL_PORT)])
+
+    device_success_re = re.compile('Starting Device Forwarder.')
+    device_failure_re = re.compile('.*:ERROR:(.*)')
+    index = self._device_process.expect([device_success_re,
+                                         device_failure_re,
+                                         pexpect.EOF,
+                                         pexpect.TIMEOUT],
+                                        Forwarder._TIMEOUT_SECS)
+    if index == 1:
+      # Failure
+      error_msg = str(self._device_process.match.group(1))
+      logging.error(self._device_process.before)
+      self._CloseProcess()
+      raise Exception('Failed to start Device Forwarder with Error: %s' %
+                      error_msg)
+    elif index == 2:
+      logging.error(self._device_process.before)
+      self._CloseProcess()
+      raise Exception('Unexpected EOF while trying to start Device Forwarder.')
+    elif index == 3:
+      logging.error(self._device_process.before)
+      self._CloseProcess()
+      raise Exception('Timeout while trying start Device Forwarder')
+
+    self._host_process = pexpect.spawn(self._host_forwarder_path,
+                                       ['--adb_port=%s' % (
+                                           self._host_adb_control_port)] +
+                                       forward_string)
+
+    # Read the output of the command to determine which device ports were
+    # forwarded to which host ports (necessary when a device_port of 0 was
+    # requested and the port was dynamically assigned on the device).
+    host_success_re = re.compile('Forwarding device port (\d+) to host (\d+):')
+    host_failure_re = re.compile('Couldn\'t start forwarder server for port '
+                                 'spec: (\d+):(\d+)')
+    for pair in port_pairs:
+      index = self._host_process.expect([host_success_re,
+                                         host_failure_re,
+                                         pexpect.EOF,
+                                         pexpect.TIMEOUT],
+                                        Forwarder._TIMEOUT_SECS)
+      if index == 0:
+        # Success
+        device_port = int(self._host_process.match.group(1))
+        host_port = int(self._host_process.match.group(2))
+        self._host_to_device_port_map[host_port] = device_port
+        logging.info("Forwarding device port: %d to host port: %d." %
+                     (device_port, host_port))
+      elif index == 1:
+        # Failure
+        device_port = int(self._host_process.match.group(1))
+        host_port = int(self._host_process.match.group(2))
+        self._CloseProcess()
+        raise Exception('Failed to forward port %d to %d' % (device_port,
+                                                             host_port))
+      elif index == 2:
+        logging.error(self._host_process.before)
+        self._CloseProcess()
+        raise Exception('Unexpected EOF while trying to forward ports %s' %
+                        port_pairs)
+      elif index == 3:
+        logging.error(self._host_process.before)
+        self._CloseProcess()
+        raise Exception('Timeout while trying to forward ports %s' % port_pairs)
+
+  def _KillHostForwarderBlocking(self, host_pattern, timeout_sec):
+    """Kills any existing host forwarders using the provided pattern.
+
+    Note that this waits until the process terminates.
+    """
+    cmd_helper.RunCmd(['pkill', '-f', host_pattern])
+    elapsed = 0
+    wait_period = 0.1
+    # pgrep exits 0 while a matching process still exists, so keep waiting
+    # until it returns non-zero (no match) or the timeout is exceeded.
+    while not cmd_helper.RunCmd(['pgrep', '-f', host_pattern]) and (
+        elapsed < timeout_sec):
+      time.sleep(wait_period)
+      elapsed += wait_period
+    if elapsed >= timeout_sec:
+      raise Exception('Timed out while killing ' + host_pattern)
+
+  def _KillDeviceForwarderBlocking(self, timeout_sec):
+    """Kills any existing device forwarders.
+
+    Note that this waits until the process terminates.
+    """
+    processes_killed = self._adb.KillAllBlocking(
+        'device_forwarder', timeout_sec)
+    if not processes_killed:
+      # Nothing was killed; only an error if the process is actually running.
+      pids = self._adb.ExtractPid('device_forwarder')
+      if pids:
+        raise Exception('Timed out while killing device_forwarder')
+
+  def _CloseProcess(self):
+    """Closes the host, device and adb-forward pexpect processes, if open."""
+    if self._host_process:
+      self._host_process.close()
+    if self._device_process:
+      self._device_process.close()
+    if self._adb_forward_process:
+      self._adb_forward_process.close()
+    self._host_process = None
+    self._device_process = None
+    self._adb_forward_process = None
+
+  def DevicePortForHostPort(self, host_port):
+    """Get the device port that corresponds to a given host port."""
+    return self._host_to_device_port_map.get(host_port)
+
+  def Close(self):
+    """Terminate the forwarder process."""
+    self._CloseProcess()
diff --git a/media/webrtc/trunk/build/android/pylib/io_stats_parser.py b/media/webrtc/trunk/build/android/pylib/io_stats_parser.py
new file mode 100644
index 000000000..89097abbc
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/io_stats_parser.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes adb binary is currently on system path.
+"""
+
+
+import collections
+
+
+def ParseIoStatsLine(line):
+ """Parses a line of io stats into a IoStats named tuple."""
+ # Field definitions: http://www.kernel.org/doc/Documentation/iostats.txt
+ IoStats = collections.namedtuple('IoStats',
+ ['device',
+ 'num_reads_issued',
+ 'num_reads_merged',
+ 'num_sectors_read',
+ 'ms_spent_reading',
+ 'num_writes_completed',
+ 'num_writes_merged',
+ 'num_sectors_written',
+ 'ms_spent_writing',
+ 'num_ios_in_progress',
+ 'ms_spent_doing_io',
+ 'ms_spent_doing_io_weighted',
+ ])
+ fields = line.split()
+ return IoStats._make([fields[2]] + [int(f) for f in fields[3:]])
diff --git a/media/webrtc/trunk/build/android/pylib/java_unittest_utils.py b/media/webrtc/trunk/build/android/pylib/java_unittest_utils.py
new file mode 100644
index 000000000..b5446dcf9
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/java_unittest_utils.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This file is imported by python tests ran by run_python_tests.py."""
+
+import os
+
+import android_commands
+from run_java_tests import TestRunner
+
+
+def _GetPackageName(fname):
+ """Extracts the package name from the test file path."""
+ base_root = os.path.join('com', 'google', 'android')
+ dirname = os.path.dirname(fname)
+ package = dirname[dirname.rfind(base_root):]
+ return package.replace(os.sep, '.')
+
+
+def RunJavaTest(fname, suite, test, ports_to_forward):
+  """Runs a single Java test on the first attached device.
+
+  Args:
+    fname: Path of the calling test file; used to derive the Java package.
+    suite: Name of the Java test suite (class).
+    test: Name of the test method within the suite.
+    ports_to_forward: Ports to forward for the test.
+
+  Returns:
+    The result of TestRunner.Run().
+  """
+  device = android_commands.GetAttachedDevices()[0]
+  package_name = _GetPackageName(fname)
+  test = package_name + '.' + suite + '#' + test
+  # NOTE(review): the positional False/0 arguments map to TestRunner options;
+  # confirm their meaning against run_java_tests.TestRunner before changing.
+  java_test_runner = TestRunner(False, device, [test], False, False, False,
+                                False, 0, ports_to_forward)
+  return java_test_runner.Run()
diff --git a/media/webrtc/trunk/build/android/pylib/json_perf_parser.py b/media/webrtc/trunk/build/android/pylib/json_perf_parser.py
new file mode 100644
index 000000000..1a8e61753
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/json_perf_parser.py
@@ -0,0 +1,160 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+ """Summarizes TraceEvent JSON data for performance metrics.
+
+ Example JSON Inputs (More tags can be added but these are required):
+ Measuring Duration:
+ [
+ { "cat": "Java",
+ "ts": 10000000000,
+ "ph": "S",
+ "name": "TestTrace"
+ },
+ { "cat": "Java",
+ "ts": 10000004000,
+ "ph": "F",
+ "name": "TestTrace"
+ },
+ ...
+ ]
+
+ Measuring Call Frequency (FPS):
+ [
+ { "cat": "Java",
+ "ts": 10000000000,
+ "ph": "I",
+ "name": "TestTraceFPS"
+ },
+ { "cat": "Java",
+ "ts": 10000004000,
+ "ph": "I",
+ "name": "TestTraceFPS"
+ },
+ ...
+ ]
+
+ Args:
+ json_data: A list of dictionaries each representing a JSON object.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ A dictionary of result data with the following tags:
+ min: The minimum value tracked.
+ max: The maximum value tracked.
+ average: The average of all the values tracked.
+ count: The number of times the category/name pair was tracked.
+ type: The type of tracking ('Instant' for instant tags and 'Span' for
+ begin/end tags).
+ category: The passed in category filter.
+ name: The passed in name filter.
+ data_points: A list of all of the times used to generate this data.
+ units: The units for the values being reported.
+
+ Raises:
+ Exception: if entry contains invalid data.
+ """
+
+ def EntryFilter(entry):
+ return entry['cat'] == 'Java' and entry['name'] == name
+ filtered_entries = filter(EntryFilter, json_data)
+
+ result = {}
+
+ result['min'] = -1
+ result['max'] = -1
+ result['average'] = 0
+ result['count'] = 0
+ result['type'] = 'Unknown'
+ result['category'] = 'Java'
+ result['name'] = name
+ result['data_points'] = []
+ result['units'] = ''
+
+ total_sum = 0
+
+ last_val = 0
+ val_type = None
+ for entry in filtered_entries:
+ if not val_type:
+ if 'mem' in entry:
+ val_type = 'mem'
+
+ def GetVal(entry):
+ return entry['mem']
+
+ result['units'] = 'kb'
+ elif 'ts' in entry:
+ val_type = 'ts'
+
+ def GetVal(entry):
+ return float(entry['ts']) / 1000.0
+
+ result['units'] = 'ms'
+ else:
+ raise Exception('Entry did not contain valid value info: %s' % entry)
+
+ if not val_type in entry:
+ raise Exception('Entry did not contain expected value type "%s" '
+ 'information: %s' % (val_type, entry))
+ val = GetVal(entry)
+ if (entry['ph'] == 'S' and
+ (result['type'] == 'Unknown' or result['type'] == 'Span')):
+ result['type'] = 'Span'
+ last_val = val
+ elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+ (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+ result['type'] == 'Instant'))):
+ if last_val > 0:
+ delta = val - last_val
+ if result['min'] == -1 or result['min'] > delta:
+ result['min'] = delta
+ if result['max'] == -1 or result['max'] < delta:
+ result['max'] = delta
+ total_sum += delta
+ result['count'] += 1
+ result['data_points'].append(delta)
+ if entry['ph'] == 'I':
+ result['type'] = 'Instant'
+ last_val = val
+ if result['count'] > 0: result['average'] = total_sum / result['count']
+
+ return result
+
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+ """Returns the results from GetAverageRunInfo using a JSON string.
+
+ Args:
+ json_string: The string containing JSON.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ See GetAverageRunInfo Returns section.
+ """
+ return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+ """Returns the results from GetAverageRunInfo using a JSON file.
+
+ Args:
+ json_file: The path to a JSON file.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ See GetAverageRunInfo Returns section.
+ """
+ with open(json_file, 'r') as f:
+ data = f.read()
+ perf = json.loads(data)
+
+ return GetAverageRunInfo(perf, name)
diff --git a/media/webrtc/trunk/build/android/pylib/perf_tests_helper.py b/media/webrtc/trunk/build/android/pylib/perf_tests_helper.py
new file mode 100644
index 000000000..c0a3ee413
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/perf_tests_helper.py
@@ -0,0 +1,120 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+import android_commands
+import math
+
+# Valid values of result type.
+RESULT_TYPES = {'unimportant': 'RESULT ',
+ 'default': '*RESULT ',
+ 'informational': ''}
+
+
+def _EscapePerfResult(s):
+ """Escapes |s| for use in a perf result."""
+ # Colons (:) and equal signs (=) are not allowed, and we chose an arbitrary
+ # limit of 40 chars.
+ return re.sub(':|=', '_', s[:40])
+
+
+def PrintPerfResult(measurement, trace, values, units, result_type='default',
+ print_to_stdout=True):
+ """Prints numerical data to stdout in the format required by perf tests.
+
+ The string args may be empty but they must not contain any colons (:) or
+ equals signs (=).
+
+ Args:
+ measurement: A description of the quantity being measured, e.g. "vm_peak".
+ trace: A description of the particular data point, e.g. "reference".
+ values: A list of numeric measured values.
+ units: A description of the units of measure, e.g. "bytes".
+ result_type: A tri-state that accepts values of ['unimportant', 'default',
+ 'informational']. 'unimportant' prints RESULT, 'default' prints *RESULT
+ and 'informational' prints nothing.
+ print_to_stdout: If True, prints the output in stdout instead of returning
+ the output to caller.
+
+ Returns:
+ String of the formatted perf result.
+ """
+ assert result_type in RESULT_TYPES, 'result type: %s is invalid' % result_type
+
+ assert isinstance(values, list)
+ assert len(values)
+ assert '/' not in measurement
+ avg = None
+ sd = None
+ if len(values) > 1:
+ try:
+ value = '[%s]' % ','.join([str(v) for v in values])
+ avg = sum([float(v) for v in values]) / len(values)
+ sqdiffs = [(float(v) - avg) ** 2 for v in values]
+ variance = sum(sqdiffs) / (len(values) - 1)
+ sd = math.sqrt(variance)
+ except ValueError:
+ value = ", ".join(values)
+ else:
+ value = values[0]
+
+ trace_name = _EscapePerfResult(trace)
+ output = '%s%s: %s%s%s %s' % (
+ RESULT_TYPES[result_type],
+ _EscapePerfResult(measurement),
+ trace_name,
+ # Do not show equal sign if the trace is empty. Usually it happens when
+ # the measurement alone is clear enough to describe the result.
+ '= ' if trace_name else '',
+ value,
+ units)
+ if avg:
+ output += '\nAvg %s: %f%s' % (measurement, avg, units)
+ if sd:
+ output += '\nSd %s: %f%s' % (measurement, sd, units)
+ if print_to_stdout:
+ print output
+ return output
+
+
+class PerfTestSetup(object):
+ """Provides methods for setting up a device for perf testing."""
+ _DROP_CACHES = '/proc/sys/vm/drop_caches'
+ _SCALING_GOVERNOR = '/sys/devices/system/cpu/cpu%d/cpufreq/scaling_governor'
+
+ def __init__(self, adb):
+ self._adb = adb
+ num_cpus = self._adb.GetFileContents('/sys/devices/system/cpu/online',
+ log_result=False)
+ assert num_cpus, 'Unable to find /sys/devices/system/cpu/online'
+ self._num_cpus = int(num_cpus[0].split('-')[-1])
+ self._original_scaling_governor = None
+
+ def DropRamCaches(self):
+ """Drops the filesystem ram caches for performance testing."""
+ if not self._adb.IsRootEnabled():
+ self._adb.EnableAdbRoot()
+ self._adb.RunShellCommand('sync')
+ self._adb.RunShellCommand('echo 3 > ' + PerfTestSetup._DROP_CACHES)
+
+ def SetUp(self):
+ """Sets up performance tests."""
+ if not self._original_scaling_governor:
+ self._original_scaling_governor = self._adb.GetFileContents(
+ PerfTestSetup._SCALING_GOVERNOR % 0,
+ log_result=False)[0]
+ self._SetScalingGovernorInternal('performance')
+ self.DropRamCaches()
+
+ def TearDown(self):
+ """Tears down performance tests."""
+ if self._original_scaling_governor:
+ self._SetScalingGovernorInternal(self._original_scaling_governor)
+ self._original_scaling_governor = None
+
+ def _SetScalingGovernorInternal(self, value):
+ for cpu in range(self._num_cpus):
+ self._adb.RunShellCommand(
+ ('echo %s > ' + PerfTestSetup._SCALING_GOVERNOR) % (value, cpu))
diff --git a/media/webrtc/trunk/build/android/pylib/ports.py b/media/webrtc/trunk/build/android/pylib/ports.py
new file mode 100644
index 000000000..e9b6b901e
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/ports.py
@@ -0,0 +1,167 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions that deal with local and device ports."""
+
+import contextlib
+import fcntl
+import httplib
+import logging
+import os
+import re
+import socket
+import traceback
+
+import cmd_helper
+import constants
+
+
+# The following two methods are used to allocate the port source for various
+# types of test servers. Because some net-related tests can be run on shards
+# at the same time, it's important to have a mechanism to allocate the ports
+# in a process-safe way. Here, we implement safe port allocation via flock.
+def ResetTestServerPortAllocation():
+ """Reset the port allocation to start from TEST_SERVER_PORT_FIRST.
+
+ Returns:
+ Returns True if the reset succeeds. Otherwise returns False.
+ """
+ try:
+ with open(constants.TEST_SERVER_PORT_FILE, 'w') as fp:
+ fp.write('%d' % constants.TEST_SERVER_PORT_FIRST)
+ if os.path.exists(constants.TEST_SERVER_PORT_LOCKFILE):
+ os.unlink(constants.TEST_SERVER_PORT_LOCKFILE)
+ return True
+ except Exception as e:
+ logging.error(e)
+ return False
+
+
+def AllocateTestServerPort():
+ """Allocate a port incrementally.
+
+ Returns:
+ Returns a valid port which should be in between TEST_SERVER_PORT_FIRST and
+ TEST_SERVER_PORT_LAST. Returning 0 means no more valid port can be used.
+ """
+ port = 0
+ ports_tried = []
+ try:
+ fp_lock = open(constants.TEST_SERVER_PORT_LOCKFILE, 'w')
+ fcntl.flock(fp_lock, fcntl.LOCK_EX)
+ # Get current valid port and calculate next valid port.
+ assert os.path.exists(constants.TEST_SERVER_PORT_FILE)
+ with open(constants.TEST_SERVER_PORT_FILE, 'r+') as fp:
+ port = int(fp.read())
+ ports_tried.append(port)
+ while IsHostPortUsed(port):
+ port += 1
+ ports_tried.append(port)
+ if (port > constants.TEST_SERVER_PORT_LAST or
+ port < constants.TEST_SERVER_PORT_FIRST):
+ port = 0
+ else:
+ fp.seek(0, os.SEEK_SET)
+ fp.write('%d' % (port + 1))
+ except Exception as e:
+ logging.info(e)
+ finally:
+ if fp_lock:
+ fcntl.flock(fp_lock, fcntl.LOCK_UN)
+ fp_lock.close()
+ if port:
+ logging.info('Allocate port %d for test server.', port)
+ else:
+ logging.error('Could not allocate port for test server. '
+ 'List of ports tried: %s', str(ports_tried))
+ return port
+
+
+def IsHostPortUsed(host_port):
+ """Checks whether the specified host port is used or not.
+
+ Uses -n -P to inhibit the conversion of host/port numbers to host/port names.
+
+ Args:
+ host_port: Port on host we want to check.
+
+ Returns:
+ True if the port on host is already used, otherwise returns False.
+ """
+ port_info = '(127\.0\.0\.1)|(localhost)\:%d' % host_port
+ # TODO(jnd): Find a better way to filter the port.
+ re_port = re.compile(port_info, re.MULTILINE)
+ if re_port.findall(cmd_helper.GetCmdOutput(['lsof', '-nPi:%d' % host_port])):
+ return True
+ return False
+
+
+def IsDevicePortUsed(adb, device_port, state=''):
+ """Checks whether the specified device port is used or not.
+
+ Args:
+ adb: Instance of AndroidCommands for talking to the device.
+ device_port: Port on device we want to check.
+ state: String of the specified state. Default is empty string, which
+ means any state.
+
+ Returns:
+ True if the port on device is already used, otherwise returns False.
+ """
+ base_url = '127.0.0.1:%d' % device_port
+ netstat_results = adb.RunShellCommand('netstat', log_result=False)
+ for single_connect in netstat_results:
+ # Column 3 is the local address which we want to check with.
+ connect_results = single_connect.split()
+ is_state_match = connect_results[5] == state if state else True
+ if connect_results[3] == base_url and is_state_match:
+ return True
+ return False
+
+
+def IsHttpServerConnectable(host, port, tries=3, command='GET', path='/',
+ expected_read='', timeout=2):
+ """Checks whether the specified http server is ready to serve request or not.
+
+ Args:
+ host: Host name of the HTTP server.
+ port: Port number of the HTTP server.
+ tries: How many times we want to test the connection. The default value is
+ 3.
+ command: The http command we use to connect to HTTP server. The default
+ command is 'GET'.
+ path: The path we use when connecting to HTTP server. The default path is
+ '/'.
+ expected_read: The content we expect to read from the response. The default
+ value is ''.
+ timeout: Timeout (in seconds) for each http connection. The default is 2s.
+
+ Returns:
+ Tuple of (connect status, client error). connect status is a boolean value
+ to indicate whether the server is connectable. client_error is the error
+ message the server returns when connect status is false.
+ """
+ assert tries >= 1
+ for i in xrange(0, tries):
+ client_error = None
+ try:
+ with contextlib.closing(httplib.HTTPConnection(
+ host, port, timeout=timeout)) as http:
+ # Output some debug information when we have tried more than 2 times.
+ http.set_debuglevel(i >= 2)
+ http.request(command, path)
+ r = http.getresponse()
+ content = r.read()
+ if r.status == 200 and r.reason == 'OK' and content == expected_read:
+ return (True, '')
+ client_error = ('Bad response: %s %s version %s\n ' %
+ (r.status, r.reason, r.version) +
+ '\n '.join([': '.join(h) for h in r.getheaders()]))
+ except (httplib.HTTPException, socket.error) as e:
+ # Probably too quick connecting: try again.
+ exception_error_msgs = traceback.format_exception_only(type(e), e)
+ if exception_error_msgs:
+ client_error = ''.join(exception_error_msgs)
+ # Only returns last client_error.
+ return (False, client_error or 'Timeout')
diff --git a/media/webrtc/trunk/build/android/pylib/python_test_base.py b/media/webrtc/trunk/build/android/pylib/python_test_base.py
new file mode 100644
index 000000000..3517cdda8
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/python_test_base.py
@@ -0,0 +1,168 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for Android Python-driven tests.
+
+This test case is intended to serve as the base class for any Python-driven
+tests. It is similar to the Python unittest module in that the user's tests
+inherit from this case and add their tests in that case.
+
+When a PythonTestBase object is instantiated, its purpose is to run only one of
+its tests. The test runner gives it the name of the test the instance will
+run. The test runner calls SetUp with the Android device ID which the test will
+run against. The runner runs the test method itself, collecting the result,
+and calls TearDown.
+
+Tests can basically do whatever they want in the test methods, such as calling
+Java tests using _RunJavaTests. Those methods have the advantage of massaging
+the Java test results into Python test results.
+"""
+
+import logging
+import os
+import time
+
+import android_commands
+import apk_info
+from run_java_tests import TestRunner
+from test_result import SingleTestResult, TestResults
+
+
+# aka the parent of com.google.android
+BASE_ROOT = 'src' + os.sep
+
+
+class PythonTestBase(object):
+ """Base class for Python-driven tests."""
+
+ def __init__(self, test_name):
+ # test_name must match one of the test methods defined on a subclass which
+ # inherits from this class.
+ # It's stored so we can do the attr lookup on demand, allowing this class
+ # to be pickled, a requirement for the multiprocessing module.
+ self.test_name = test_name
+ class_name = self.__class__.__name__
+ self.qualified_name = class_name + '.' + self.test_name
+
+ def SetUp(self, options):
+ self.options = options
+ self.shard_index = self.options.shard_index
+ self.device_id = self.options.device_id
+ self.adb = android_commands.AndroidCommands(self.device_id)
+ self.ports_to_forward = []
+
+ def TearDown(self):
+ pass
+
+ def Run(self):
+ logging.warning('Running Python-driven test: %s', self.test_name)
+ return getattr(self, self.test_name)()
+
+ def _RunJavaTest(self, fname, suite, test):
+ """Runs a single Java test with a Java TestRunner.
+
+ Args:
+ fname: filename for the test (e.g. foo/bar/baz/tests/FooTest.py)
+ suite: name of the Java test suite (e.g. FooTest)
+ test: name of the test method to run (e.g. testFooBar)
+
+ Returns:
+ TestResults object with a single test result.
+ """
+ test = self._ComposeFullTestName(fname, suite, test)
+ apks = [apk_info.ApkInfo(self.options.test_apk_path,
+ self.options.test_apk_jar_path)]
+ java_test_runner = TestRunner(self.options, self.device_id, [test], False,
+ self.shard_index,
+ apks,
+ self.ports_to_forward)
+ return java_test_runner.Run()
+
+ def _RunJavaTests(self, fname, tests):
+ """Calls a list of tests and stops at the first test failure.
+
+ This method iterates until either it encounters a non-passing test or it
+ exhausts the list of tests. Then it returns the appropriate Python result.
+
+ Args:
+ fname: filename for the Python test
+ tests: a list of Java test names which will be run
+
+ Returns:
+ A TestResults object containing a result for this Python test.
+ """
+ start_ms = int(time.time()) * 1000
+
+ result = None
+ for test in tests:
+ # We're only running one test at a time, so this TestResults object will
+ # hold only one result.
+ suite, test_name = test.split('.')
+ result = self._RunJavaTest(fname, suite, test_name)
+ # A non-empty list means the test did not pass.
+ if result.GetAllBroken():
+ break
+
+ duration_ms = int(time.time()) * 1000 - start_ms
+
+ # Do something with result.
+ return self._ProcessResults(result, start_ms, duration_ms)
+
+ def _ProcessResults(self, result, start_ms, duration_ms):
+ """Translates a Java test result into a Python result for this test.
+
+ The TestRunner class that we use under the covers will return a test result
+ for that specific Java test. However, to make reporting clearer, we have
+ this method to abstract that detail and instead report that as a failure of
+ this particular test case while still including the Java stack trace.
+
+ Args:
+ result: TestResults with a single Java test result
+ start_ms: the time the test started
+ duration_ms: the length of the test
+
+ Returns:
+ A TestResults object containing a result for this Python test.
+ """
+ test_results = TestResults()
+
+ # If our test is in broken, then it crashed/failed.
+ broken = result.GetAllBroken()
+ if broken:
+ # Since we have run only one test, take the first and only item.
+ single_result = broken[0]
+
+ log = single_result.log
+ if not log:
+ log = 'No logging information.'
+
+ python_result = SingleTestResult(self.qualified_name, start_ms,
+ duration_ms,
+ log)
+
+ # Figure out where the test belonged. There's probably a cleaner way of
+ # doing this.
+ if single_result in result.crashed:
+ test_results.crashed = [python_result]
+ elif single_result in result.failed:
+ test_results.failed = [python_result]
+ elif single_result in result.unknown:
+ test_results.unknown = [python_result]
+
+ else:
+ python_result = SingleTestResult(self.qualified_name, start_ms,
+ duration_ms)
+ test_results.ok = [python_result]
+
+ return test_results
+
+ def _ComposeFullTestName(self, fname, suite, test):
+ package_name = self._GetPackageName(fname)
+ return package_name + '.' + suite + '#' + test
+
+ def _GetPackageName(self, fname):
+ """Extracts the package name from the test file path."""
+ dirname = os.path.dirname(fname)
+ package = dirname[dirname.rfind(BASE_ROOT) + len(BASE_ROOT):]
+ return package.replace(os.sep, '.')
diff --git a/media/webrtc/trunk/build/android/pylib/python_test_caller.py b/media/webrtc/trunk/build/android/pylib/python_test_caller.py
new file mode 100644
index 000000000..882b89299
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/python_test_caller.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper module for calling python-based tests."""
+
+
+import logging
+import sys
+import time
+
+from test_result import TestResults
+
+
+def CallPythonTest(test, options):
+ """Invokes a test function and translates Python exceptions into test results.
+
+ This method invokes SetUp()/TearDown() on the test. It is intended to be
+ resilient to exceptions in SetUp(), the test itself, and TearDown(). Any
+ Python exception means the test is marked as failed, and the test result will
+ contain information about the exception.
+
+ If SetUp() raises an exception, the test is not run.
+
+ If TearDown() raises an exception, the test is treated as a failure. However,
+ if the test itself raised an exception beforehand, that stack trace will take
+ precedence whether or not TearDown() also raised an exception.
+
+ shard_index is not applicable in single-device scenarios, when test execution
+ is serial rather than parallel. Tests can use this to bring up servers with
+ unique port numbers, for example. See also python_test_sharder.
+
+ Args:
+ test: an object which is ostensibly a subclass of PythonTestBase.
+ options: Options to use for setting up tests.
+
+ Returns:
+ A TestResults object which contains any results produced by the test or, in
+ the case of a Python exception, the Python exception info.
+ """
+
+ start_date_ms = int(time.time()) * 1000
+ failed = False
+
+ try:
+ test.SetUp(options)
+ except Exception:
+ failed = True
+ logging.exception(
+ 'Caught exception while trying to run SetUp() for test: ' +
+ test.qualified_name)
+ # Tests whose SetUp() method has failed are likely to fail, or at least
+ # yield invalid results.
+ exc_info = sys.exc_info()
+ return TestResults.FromPythonException(test.qualified_name, start_date_ms,
+ exc_info)
+
+ try:
+ result = test.Run()
+ except Exception:
+ # Setting this lets TearDown() avoid stomping on our stack trace from Run()
+ # should TearDown() also raise an exception.
+ failed = True
+ logging.exception('Caught exception while trying to run test: ' +
+ test.qualified_name)
+ exc_info = sys.exc_info()
+ result = TestResults.FromPythonException(test.qualified_name, start_date_ms,
+ exc_info)
+
+ try:
+ test.TearDown()
+ except Exception:
+ logging.exception(
+ 'Caught exception while trying run TearDown() for test: ' +
+ test.qualified_name)
+ if not failed:
+ # Don't stomp the error during the test if TearDown blows up. This is a
+ # trade-off: if the test fails, this will mask any problem with TearDown
+ # until the test is fixed.
+ exc_info = sys.exc_info()
+ result = TestResults.FromPythonException(test.qualified_name,
+ start_date_ms, exc_info)
+
+ return result
diff --git a/media/webrtc/trunk/build/android/pylib/python_test_sharder.py b/media/webrtc/trunk/build/android/pylib/python_test_sharder.py
new file mode 100644
index 000000000..e27096d78
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/python_test_sharder.py
@@ -0,0 +1,203 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes care of sharding the Python-driven tests across multiple devices."""
+
+import copy
+import logging
+import multiprocessing
+
+from python_test_caller import CallPythonTest
+from run_java_tests import FatalTestException
+import sharded_tests_queue
+from test_result import TestResults
+
+
+def SetTestsContainer(tests_container):
+ """Sets PythonTestSharder as a top-level field.
+
+ PythonTestSharder uses multiprocessing.Pool, which creates a pool of
+ processes. This is used to initialize each worker in the pool, ensuring that
+ each worker has access to this shared pool of tests.
+
+ The multiprocessing module requires that this be a top-level method.
+
+ Args:
+ tests_container: the container for all the tests.
+ """
+ PythonTestSharder.tests_container = tests_container
+
+
+def _DefaultRunnable(test_runner):
+ """A default runnable for a PythonTestRunner.
+
+ Args:
+ test_runner: A PythonTestRunner which will run tests.
+
+ Returns:
+ The test results.
+ """
+ return test_runner.RunTests()
+
+
+class PythonTestRunner(object):
+ """Thin wrapper around a list of PythonTestBase instances.
+
+ This is meant to be a long-lived object which can run multiple Python tests
+ within its lifetime. Tests will receive the device_id and shard_index.
+
+ The shard index affords the ability to create unique port numbers (e.g.
+ DEFAULT_PORT + shard_index) if the test so wishes.
+ """
+
+ def __init__(self, options):
+ """Constructor.
+
+ Args:
+ options: Options to use for setting up tests.
+ """
+ self.options = options
+
+ def RunTests(self):
+ """Runs tests from the shared pool of tests, aggregating results.
+
+ Returns:
+ A list of test results for all of the tests which this runner executed.
+ """
+ tests = PythonTestSharder.tests_container
+
+ results = []
+ for t in tests:
+ res = CallPythonTest(t, self.options)
+ results.append(res)
+
+ return TestResults.FromTestResults(results)
+
+
+class PythonTestSharder(object):
+ """Runs Python tests in parallel on multiple devices.
+
+ This is lifted more or less wholesale from BaseTestRunner.
+
+ Under the covers, it creates a pool of long-lived PythonTestRunners, which
+ execute tests from the pool of tests.
+
+ Args:
+ attached_devices: a list of device IDs attached to the host.
+ available_tests: a list of tests to run which subclass PythonTestBase.
+ options: Options to use for setting up tests.
+
+ Returns:
+ An aggregated list of test results.
+ """
+ tests_container = None
+
+ def __init__(self, attached_devices, available_tests, options):
+ self.options = options
+ self.attached_devices = attached_devices
+ self.retries = options.shard_retries
+ self.tests = available_tests
+
+ def _SetupSharding(self, tests):
+ """Creates the shared pool of tests and makes it available to test runners.
+
+ Args:
+ tests: the list of tests which will be consumed by workers.
+ """
+ SetTestsContainer(sharded_tests_queue.ShardedTestsQueue(
+ len(self.attached_devices), tests))
+
+ def RunShardedTests(self):
+ """Runs tests in parallel using a pool of workers.
+
+ Returns:
+ A list of test results aggregated from all test runs.
+ """
+ logging.warning('*' * 80)
+ logging.warning('Sharding in ' + str(len(self.attached_devices)) +
+ ' devices.')
+ logging.warning('Note that the output is not synchronized.')
+ logging.warning('Look for the "Final result" banner in the end.')
+ logging.warning('*' * 80)
+ all_passed = []
+ test_results = TestResults()
+ tests_to_run = self.tests
+ for retry in xrange(self.retries):
+ logging.warning('Try %d of %d', retry + 1, self.retries)
+ self._SetupSharding(self.tests)
+ test_runners = self._MakeTestRunners(self.attached_devices)
+ logging.warning('Starting...')
+ pool = multiprocessing.Pool(len(self.attached_devices),
+ SetTestsContainer,
+ [PythonTestSharder.tests_container])
+
+ # List of TestResults objects from each test execution.
+ try:
+ results_lists = pool.map(_DefaultRunnable, test_runners)
+ except Exception:
+ logging.exception('Unable to run tests. Something with the '
+ 'PythonTestRunners has gone wrong.')
+ raise FatalTestException('PythonTestRunners were unable to run tests.')
+
+ test_results = TestResults.FromTestResults(results_lists)
+ # Accumulate passing results.
+ all_passed += test_results.ok
+ # If we have failed tests, map them to tests to retry.
+ failed_tests = test_results.GetAllBroken()
+ tests_to_run = self._GetTestsToRetry(self.tests,
+ failed_tests)
+
+ # Bail out early if we have no more tests. This can happen if all tests
+ # pass before we're out of retries, for example.
+ if not tests_to_run:
+ break
+
+ final_results = TestResults()
+ # all_passed has accumulated all passing test results.
+ # test_results will have the results from the most recent run, which could
+ # include a variety of failure modes (unknown, crashed, failed, etc).
+ final_results = test_results
+ final_results.ok = all_passed
+
+ return final_results
+
+ def _MakeTestRunners(self, attached_devices):
+ """Initialize and return a list of PythonTestRunners.
+
+ Args:
+ attached_devices: list of device IDs attached to host.
+
+ Returns:
+ A list of PythonTestRunners, one for each device.
+ """
+ test_runners = []
+ for index, device in enumerate(attached_devices):
+ logging.warning('*' * 80)
+ logging.warning('Creating shard %d for %s', index, device)
+ logging.warning('*' * 80)
+ # Bind the PythonTestRunner to a device & shard index. Give it the
+ # runnable which it will use to actually execute the tests.
+ test_options = copy.deepcopy(self.options)
+ test_options.ensure_value('device_id', device)
+ test_options.ensure_value('shard_index', index)
+ test_runner = PythonTestRunner(test_options)
+ test_runners.append(test_runner)
+
+ return test_runners
+
+ def _GetTestsToRetry(self, available_tests, failed_tests):
+ """Infers a list of tests to retry from failed tests and available tests.
+
+ Args:
+ available_tests: a list of tests which subclass PythonTestBase.
+ failed_tests: a list of SingleTestResults representing failed tests.
+
+ Returns:
+ A list of test objects which correspond to test names found in
+ failed_tests, or an empty list if there is no correspondence.
+ """
+ failed_test_names = map(lambda t: t.test_name, failed_tests)
+ tests_to_retry = [t for t in available_tests
+ if t.qualified_name in failed_test_names]
+ return tests_to_retry
diff --git a/media/webrtc/trunk/build/android/pylib/run_java_tests.py b/media/webrtc/trunk/build/android/pylib/run_java_tests.py
new file mode 100644
index 000000000..fc0a13fd8
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/run_java_tests.py
@@ -0,0 +1,591 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs the Java tests. See more information on run_instrumentation_tests.py."""
+
+import fnmatch
+import logging
+import os
+import re
+import shutil
+import sys
+import time
+
+import android_commands
+import apk_info
+from base_test_runner import BaseTestRunner
+from base_test_sharder import BaseTestSharder, SetTestsContainer
+import cmd_helper
+import constants
+import errors
+from forwarder import Forwarder
+from json_perf_parser import GetAverageRunInfoFromJSONString
+from perf_tests_helper import PrintPerfResult
+import sharded_tests_queue
+from test_result import SingleTestResult, TestResults
+import valgrind_tools
+
+_PERF_TEST_ANNOTATION = 'PerfTest'
+
+
class FatalTestException(Exception):
  """Raised when the test run hits an unrecoverable error."""
+
+
+def _TestNameToExpectation(test_name):
+ # A test name is a Package.Path.Class#testName; convert to what we use in
+ # the expectation file.
+ return '.'.join(test_name.replace('#', '.').split('.')[-2:])
+
+
def FilterTests(test_names, pattern_list, inclusive):
  """Filters |test_names| using a list of patterns.

  Args:
    test_names: A list of test names.
    pattern_list: A list of fnmatch-style patterns, matched against the
      expectation-file form of each name (Class.testName).
    inclusive: If True, returns the tests that match any pattern. If False,
      returns the tests that do not match any pattern.

  Returns:
    A list of test names.
  """
  ret = []
  for t in test_names:
    # Hoist the name conversion out of the pattern loop (the original
    # recomputed it per pattern) and let any() short-circuit on the first
    # matching pattern.
    expectation = _TestNameToExpectation(t)
    has_match = any(fnmatch.fnmatch(expectation, pattern)
                    for pattern in pattern_list)
    if has_match == inclusive:
      ret.append(t)
  return ret
+
+
class TestRunner(BaseTestRunner):
  """Responsible for running a series of tests connected to a single device."""

  # Device-side directory (relative to external storage) receiving test data.
  _DEVICE_DATA_DIR = 'chrome/test/data'
  # Emma coverage jar from the Android build tree ($ANDROID_BUILD_TOP).
  _EMMA_JAR = os.path.join(os.environ.get('ANDROID_BUILD_TOP', ''),
                           'external/emma/lib/emma.jar')
  # Host-side file accumulating merged coverage across the whole session.
  _COVERAGE_MERGED_FILENAME = 'unittest_coverage.es'
  # Web root where HTML coverage reports are published; may be unset.
  _COVERAGE_WEB_ROOT_DIR = os.environ.get('EMMA_WEB_ROOTDIR')
  _COVERAGE_FILENAME = 'coverage.ec'
  # On-device location where Emma writes per-test coverage output.
  _COVERAGE_RESULT_PATH = ('/data/data/com.google.android.apps.chrome/files/' +
                           _COVERAGE_FILENAME)
  _COVERAGE_META_INFO_PATH = os.path.join(os.environ.get('ANDROID_BUILD_TOP',
                                                         ''),
                                          'out/target/common/obj/APPS',
                                          'Chrome_intermediates/coverage.em')
  _HOSTMACHINE_PERF_OUTPUT_FILE = '/tmp/chrome-profile'
  _DEVICE_PERF_OUTPUT_SEARCH_PREFIX = (constants.DEVICE_PERF_OUTPUT_DIR +
                                       '/chrome-profile*')
  # Class-level cache: device serial -> True once data files were pushed,
  # so CopyTestFilesOnce() is a no-op on later runners for the same device.
  _DEVICE_HAS_TEST_FILES = {}
+
  def __init__(self, options, device, tests_iter, coverage, shard_index, apks,
               ports_to_forward):
    """Create a new TestRunner.

    Args:
      options: An options object with the following required attributes:
        - build_type: 'Release' or 'Debug'.
        - install_apk: Re-installs the apk if opted.
        - save_perf_json: Whether or not to save the JSON file from UI perf
          tests.
        - screenshot_failures: Take a screenshot for a test failure.
        - tool: Name of the Valgrind tool.
        - wait_for_debugger: Blocks until the debugger is connected.
      device: Attached android device serial.
      tests_iter: A list (or shared queue) of tests to be run; may be None,
        in which case the sharder's container is picked up lazily.
      coverage: Collects coverage information if opted.
      shard_index: Shard number for this TestRunner, used to create unique
        port numbers.
      apks: A list of ApkInfo objects to be installed. The first element
        must be the tests apk; the rest are apks used by the test.
        Defaults to ChromeTest.apk when empty.
      ports_to_forward: A list of port numbers for which to set up forwarders.
        Can be optionally requested by a test case.

    Raises:
      FatalTestException: if coverage metadata is not available.
    """
    BaseTestRunner.__init__(
        self, device, options.tool, shard_index, options.build_type)

    if not apks:
      apks = [apk_info.ApkInfo(options.test_apk_path,
                               options.test_apk_jar_path)]

    self.build_type = options.build_type
    self.install_apk = options.install_apk
    self.save_perf_json = options.save_perf_json
    self.screenshot_failures = options.screenshot_failures
    self.wait_for_debugger = options.wait_for_debugger

    self.tests_iter = tests_iter
    self.coverage = coverage
    self.apks = apks
    self.test_apk = apks[0]
    # The package name doubles as the instrumentation class path prefix.
    self.instrumentation_class_path = self.test_apk.GetPackageName()
    self.ports_to_forward = ports_to_forward

    self.test_results = TestResults()
    self.forwarder = None

    if self.coverage:
      # Start each coverage session from a clean merged file; fail fast if
      # the Emma metadata or the publishing web root is missing.
      if os.path.exists(TestRunner._COVERAGE_MERGED_FILENAME):
        os.remove(TestRunner._COVERAGE_MERGED_FILENAME)
      if not os.path.exists(TestRunner._COVERAGE_META_INFO_PATH):
        raise FatalTestException('FATAL ERROR in ' + sys.argv[0] +
                                 ' : Coverage meta info [' +
                                 TestRunner._COVERAGE_META_INFO_PATH +
                                 '] does not exist.')
      if (not TestRunner._COVERAGE_WEB_ROOT_DIR or
          not os.path.exists(TestRunner._COVERAGE_WEB_ROOT_DIR)):
        raise FatalTestException('FATAL ERROR in ' + sys.argv[0] +
                                 ' : Path specified in $EMMA_WEB_ROOTDIR [' +
                                 TestRunner._COVERAGE_WEB_ROOT_DIR +
                                 '] does not exist.')
+
+ def _GetTestsIter(self):
+ if not self.tests_iter:
+ # multiprocessing.Queue can't be pickled across processes if we have it as
+ # a member set during constructor. Grab one here instead.
+ self.tests_iter = (BaseTestSharder.tests_container)
+ assert self.tests_iter
+ return self.tests_iter
+
  def CopyTestFilesOnce(self):
    """Pushes the test data files to the device. Installs the apk if opted."""
    # Pushing data is expensive; do it at most once per device per process
    # (tracked in a class-level map keyed by device serial).
    if TestRunner._DEVICE_HAS_TEST_FILES.get(self.device, False):
      logging.warning('Already copied test files to device %s, skipping.',
                      self.device)
      return
    # (host source path, destination layer under _DEVICE_DATA_DIR).
    host_test_files = [
        ('android_webview/test/data/device_files', 'webview'),
        ('content/test/data/android/device_files', 'content'),
        ('chrome/test/data/android/device_files', 'chrome')
    ]
    for (host_src, dst_layer) in host_test_files:
      host_test_files_path = constants.CHROME_DIR + '/' + host_src
      if os.path.exists(host_test_files_path):
        self.adb.PushIfNeeded(host_test_files_path,
                              self.adb.GetExternalStorage() + '/' +
                              TestRunner._DEVICE_DATA_DIR + '/' + dst_layer)
    if self.install_apk:
      for apk in self.apks:
        self.adb.ManagedInstall(apk.GetApkPath(),
                                package_name=apk.GetPackageName())
    # Let the active tool (e.g. a Valgrind wrapper) push its own files too.
    self.tool.CopyFiles()
    TestRunner._DEVICE_HAS_TEST_FILES[self.device] = True
+
  def SaveCoverageData(self, test):
    """Saves the Emma coverage data before it's overwritten by the next test.

    Args:
      test: the test whose coverage data is collected.
    """
    if not self.coverage:
      return
    # Pull the per-test .ec file off the device; a failed pull is logged
    # but deliberately not fatal.
    if not self.adb.Adb().Pull(TestRunner._COVERAGE_RESULT_PATH,
                               constants.CHROME_DIR):
      logging.error('ERROR: Unable to find file ' +
                    TestRunner._COVERAGE_RESULT_PATH +
                    ' on the device for test ' + test)
    pulled_coverage_file = os.path.join(constants.CHROME_DIR,
                                        TestRunner._COVERAGE_FILENAME)
    if os.path.exists(TestRunner._COVERAGE_MERGED_FILENAME):
      # Merge this test's coverage into the accumulated session file.
      cmd = ['java', '-classpath', TestRunner._EMMA_JAR, 'emma', 'merge',
             '-in', pulled_coverage_file,
             '-in', TestRunner._COVERAGE_MERGED_FILENAME,
             '-out', TestRunner._COVERAGE_MERGED_FILENAME]
      cmd_helper.RunCmd(cmd)
    else:
      # First test of the session: the pulled file seeds the merged file.
      shutil.copy(pulled_coverage_file,
                  TestRunner._COVERAGE_MERGED_FILENAME)
    os.remove(pulled_coverage_file)
+
  def GenerateCoverageReportIfNeeded(self):
    """Uses the Emma to generate a coverage report and a html page."""
    if not self.coverage:
      return
    cmd = ['java', '-classpath', TestRunner._EMMA_JAR,
           'emma', 'report', '-r', 'html',
           '-in', TestRunner._COVERAGE_MERGED_FILENAME,
           '-in', TestRunner._COVERAGE_META_INFO_PATH]
    cmd_helper.RunCmd(cmd)
    # Publish the generated 'coverage' directory under a timestamped name
    # in the web root, then point a 'Latest_Coverage_Run' directory at it.
    new_dir = os.path.join(TestRunner._COVERAGE_WEB_ROOT_DIR,
                           time.strftime('Coverage_for_%Y_%m_%d_%a_%H:%M'))
    shutil.copytree('coverage', new_dir)

    latest_dir = os.path.join(TestRunner._COVERAGE_WEB_ROOT_DIR,
                              'Latest_Coverage_Run')
    if os.path.exists(latest_dir):
      shutil.rmtree(latest_dir)
    os.mkdir(latest_dir)
    webserver_new_index = os.path.join(new_dir, 'index.html')
    webserver_new_files = os.path.join(new_dir, '_files')
    webserver_latest_index = os.path.join(latest_dir, 'index.html')
    webserver_latest_files = os.path.join(latest_dir, '_files')
    # Setup new softlinks to last result.
    os.symlink(webserver_new_index, webserver_latest_index)
    os.symlink(webserver_new_files, webserver_latest_files)
    # Make the published report world-readable for the web server.
    cmd_helper.RunCmd(['chmod', '755', '-R', latest_dir, new_dir])
+
+ def _GetInstrumentationArgs(self):
+ ret = {}
+ if self.coverage:
+ ret['coverage'] = 'true'
+ if self.wait_for_debugger:
+ ret['debug'] = 'true'
+ return ret
+
  def _TakeScreenshot(self, test):
    """Takes a screenshot from the device, named after |test|.

    Args:
      test: name of the test that failed; used for the output file name.
    """
    # monkeyrunner ships with the Android SDK checked into third_party.
    screenshot_tool = os.path.join(constants.CHROME_DIR,
        'third_party/android_tools/sdk/tools/monkeyrunner')
    screenshot_script = os.path.join(constants.CHROME_DIR,
        'build/android/monkeyrunner_screenshot.py')
    screenshot_path = os.path.join(constants.CHROME_DIR,
                                   'out_screenshots')
    if not os.path.exists(screenshot_path):
      os.mkdir(screenshot_path)
    screenshot_name = os.path.join(screenshot_path, test + '.png')
    logging.info('Taking screenshot named %s', screenshot_name)
    cmd_helper.RunCmd([screenshot_tool, screenshot_script,
                       '--serial', self.device,
                       '--file', screenshot_name])
+
  def SetUp(self):
    """Sets up the test harness and device before all tests are run."""
    super(TestRunner, self).SetUp()
    if not self.adb.IsRootEnabled():
      logging.warning('Unable to enable java asserts for %s, non rooted device',
                      self.device)
    else:
      # SetJavaAssertsEnabled returns truthy when the setting changed;
      # presumably the (fast) reboot is needed for it to take effect —
      # TODO confirm.
      if self.adb.SetJavaAssertsEnabled(enable=True):
        self.adb.Reboot(full_reboot=False)

    # We give different default value to launch HTTP server based on shard index
    # because it may have race condition when multiple processes are trying to
    # launch lighttpd with same port at same time.
    http_server_ports = self.LaunchTestHttpServer(
        os.path.join(constants.CHROME_DIR),
        (constants.LIGHTTPD_RANDOM_PORT_FIRST + self.shard_index))
    if self.ports_to_forward:
      port_pairs = [(port, port) for port in self.ports_to_forward]
      # We need to remember which ports the HTTP server is using, since the
      # forwarder will stomp on them otherwise.
      port_pairs.append(http_server_ports)
      self.forwarder = Forwarder(
          self.adb, port_pairs, self.tool, '127.0.0.1', self.build_type)
    self.CopyTestFilesOnce()
    self.flags.AddFlags(['--enable-test-intents'])
+
  def TearDown(self):
    """Cleans up the test harness and saves outstanding data from test run."""
    if self.forwarder:
      self.forwarder.Close()
    # No-op unless coverage collection was enabled for this run.
    self.GenerateCoverageReportIfNeeded()
    super(TestRunner, self).TearDown()
+
  def TestSetup(self, test):
    """Sets up the test harness for running a particular test.

    Args:
      test: The name of the test that will be run.
    """
    # Per-test scaffolding: perf monitoring (perf tests only), timeout
    # scaling, and the active tool's environment.
    self.SetupPerfMonitoringIfNeeded(test)
    self._SetupIndividualTestTimeoutScale(test)
    self.tool.SetupEnvironment()

    # Make sure the forwarder is still running.
    self.RestartHttpServerForwarderIfNecessary()
+
+ def _IsPerfTest(self, test):
+ """Determines whether a test is a performance test.
+
+ Args:
+ test: The name of the test to be checked.
+
+ Returns:
+ Whether the test is annotated as a performance test.
+ """
+ return _PERF_TEST_ANNOTATION in self.test_apk.GetTestAnnotations(test)
+
  def SetupPerfMonitoringIfNeeded(self, test):
    """Sets up performance monitoring if the specified test requires it.

    Args:
      test: The name of the test to be run.
    """
    if not self._IsPerfTest(test):
      return
    # Clear any stale profile output from a previous run, then start
    # watching logcat for the perf annotation the test will emit.
    self.adb.Adb().SendCommand('shell rm ' +
                               TestRunner._DEVICE_PERF_OUTPUT_SEARCH_PREFIX)
    self.adb.StartMonitoringLogcat()
+
+ def TestTeardown(self, test, test_result):
+ """Cleans up the test harness after running a particular test.
+
+ Depending on the options of this TestRunner this might handle coverage
+ tracking or performance tracking. This method will only be called if the
+ test passed.
+
+ Args:
+ test: The name of the test that was just run.
+ test_result: result for this test.
+ """
+
+ self.tool.CleanUpEnvironment()
+
+ # The logic below relies on the test passing.
+ if not test_result or test_result.GetStatusCode():
+ return
+
+ self.TearDownPerfMonitoring(test)
+ self.SaveCoverageData(test)
+
  def TearDownPerfMonitoring(self, test):
    """Cleans up performance monitoring if the specified test required it.

    Args:
      test: The name of the test that was just run.
    Raises:
      FatalTestException: if there's anything wrong with the perf data.
    """
    if not self._IsPerfTest(test):
      return
    # Test names are 'Package.Class#testName'; the annotation uses only the
    # bare method name after the '#'.
    raw_test_name = test.split('#')[1]

    # Wait and grab annotation data so we can figure out which traces to parse.
    # NOTE(review): the pattern is not a raw string, but '\*', '\(' etc. are
    # not Python escapes, so the backslashes reach re intact.
    regex = self.adb.WaitForLogMatch(re.compile('\*\*PERFANNOTATION\(' +
                                                raw_test_name +
                                                '\)\:(.*)'), None)

    # If the test is set to run on a specific device type only (IE: only
    # tablet or phone) and it is being run on the wrong device, the test
    # just quits and does not do anything.  The java test harness will still
    # print the appropriate annotation for us, but will add --NORUN-- for
    # us so we know to ignore the results.
    # The --NORUN-- tag is managed by MainActivityTestBase.java
    if regex.group(1) != '--NORUN--':

      # Obtain the relevant perf data.  The data is dumped to a
      # JSON formatted file.
      json_string = self.adb.GetFileContents(
          '/data/data/com.google.android.apps.chrome/files/PerfTestData.txt')

      if json_string:
        json_string = '\n'.join(json_string)
      else:
        raise FatalTestException('Perf file does not exist or is empty')

      if self.save_perf_json:
        json_local_file = '/tmp/chromium-android-perf-json-' + raw_test_name
        with open(json_local_file, 'w') as f:
          f.write(json_string)
        logging.info('Saving Perf UI JSON from test ' +
                     test + ' to ' + json_local_file)

      # The annotation payload is ';'-separated sets of
      # 'trace_name,perf_name,units'.
      raw_perf_data = regex.group(1).split(';')

      for raw_perf_set in raw_perf_data:
        if raw_perf_set:
          perf_set = raw_perf_set.split(',')
          if len(perf_set) != 3:
            raise FatalTestException('Unexpected number of tokens in '
                                     'perf annotation string: ' + raw_perf_set)

          # Process the performance data
          result = GetAverageRunInfoFromJSONString(json_string, perf_set[0])

          PrintPerfResult(perf_set[1], perf_set[2],
                          [result['average']], result['units'])
+
+ def _SetupIndividualTestTimeoutScale(self, test):
+ timeout_scale = self._GetIndividualTestTimeoutScale(test)
+ valgrind_tools.SetChromeTimeoutScale(self.adb, timeout_scale)
+
+ def _GetIndividualTestTimeoutScale(self, test):
+ """Returns the timeout scale for the given |test|."""
+ annotations = self.apks[0].GetTestAnnotations(test)
+ timeout_scale = 1
+ if 'TimeoutScale' in annotations:
+ for annotation in annotations:
+ scale_match = re.match('TimeoutScale:([0-9]+)', annotation)
+ if scale_match:
+ timeout_scale = int(scale_match.group(1))
+ if self.wait_for_debugger:
+ timeout_scale *= 100
+ return timeout_scale
+
+ def _GetIndividualTestTimeoutSecs(self, test):
+ """Returns the timeout in seconds for the given |test|."""
+ annotations = self.apks[0].GetTestAnnotations(test)
+ if 'Manual' in annotations:
+ return 600 * 60
+ if 'External' in annotations:
+ return 10 * 60
+ if 'LargeTest' in annotations or _PERF_TEST_ANNOTATION in annotations:
+ return 5 * 60
+ if 'MediumTest' in annotations:
+ return 3 * 60
+ return 1 * 60
+
  def RunTests(self):
    """Runs the tests, generating the coverage if needed.

    Iterates the shared tests iterable, running each test through adb
    instrumentation and accumulating ok/failed/crashed results.

    Returns:
      A TestResults object.
    """
    instrumentation_path = (self.instrumentation_class_path +
                            '/android.test.InstrumentationTestRunner')
    instrumentation_args = self._GetInstrumentationArgs()
    for test in self._GetTestsIter():
      test_result = None
      start_date_ms = None
      try:
        self.TestSetup(test)
        # NOTE(review): int(time.time()) * 1000 quantizes timestamps (and
        # hence durations) to whole seconds.
        start_date_ms = int(time.time()) * 1000
        args_with_filter = dict(instrumentation_args)
        args_with_filter['class'] = test
        # |test_results| is a list that should contain
        # a single TestResult object.
        logging.warn(args_with_filter)
        (test_results, _) = self.adb.Adb().StartInstrumentation(
            instrumentation_path=instrumentation_path,
            instrumentation_args=args_with_filter,
            timeout_time=(self._GetIndividualTestTimeoutSecs(test) *
                          self._GetIndividualTestTimeoutScale(test) *
                          self.tool.GetTimeoutScale()))
        duration_ms = int(time.time()) * 1000 - start_date_ms
        assert len(test_results) == 1
        test_result = test_results[0]
        status_code = test_result.GetStatusCode()
        if status_code:
          # Non-zero status code means failure; capture the reason and
          # optionally a screenshot.
          log = test_result.GetFailureReason()
          if not log:
            log = 'No information.'
          if self.screenshot_failures or log.find('INJECT_EVENTS perm') >= 0:
            self._TakeScreenshot(test)
          self.test_results.failed += [SingleTestResult(test, start_date_ms,
                                                        duration_ms, log)]
        else:
          result = [SingleTestResult(test, start_date_ms, duration_ms)]
          self.test_results.ok += result
      # Catch exceptions thrown by StartInstrumentation().
      # See ../../third_party/android/testrunner/adb_interface.py
      except (errors.WaitForResponseTimedOutError,
              errors.DeviceUnresponsiveError,
              errors.InstrumentationError), e:
        if start_date_ms:
          duration_ms = int(time.time()) * 1000 - start_date_ms
        else:
          start_date_ms = int(time.time()) * 1000
          duration_ms = 0
        message = str(e)
        if not message:
          message = 'No information.'
        self.test_results.crashed += [SingleTestResult(test, start_date_ms,
                                                       duration_ms,
                                                       message)]
        test_result = None
      # Teardown runs even for crashed tests (test_result is then None).
      self.TestTeardown(test, test_result)
    return self.test_results
+
+
class TestSharder(BaseTestSharder):
  """Responsible for sharding the tests on the connected devices."""

  def __init__(self, attached_devices, options, tests, apks):
    BaseTestSharder.__init__(self, attached_devices)
    self.options = options
    self.tests = tests
    self.apks = apks

  def SetupSharding(self, tests):
    """Distributes |tests| into a queue shared by all shards."""
    shared_queue = sharded_tests_queue.ShardedTestsQueue(
        len(self.attached_devices), tests)
    SetTestsContainer(shared_queue)

  def CreateShardedTestRunner(self, device, index):
    """Creates a sharded test runner.

    Args:
      device: Device serial where this shard will run.
      index: Index of this device in the pool.

    Returns:
      A TestRunner object.
    """
    return TestRunner(self.options, device, None, False, index, self.apks, [])
+
+
def DispatchJavaTests(options, apks):
  """Dispatches Java tests onto connected device(s).

  If possible, this method will attempt to shard the tests to
  all connected devices. Otherwise, dispatch and run tests on one device.

  Args:
    options: Command line options.
    apks: list of APKs to use; the first element is the tests apk.

  Returns:
    A TestResults object holding the results of the Java tests.

  Raises:
    FatalTestException: when there are no attached devices.
  """
  test_apk = apks[0]
  if options.annotation:
    available_tests = test_apk.GetAnnotatedTests(options.annotation)
    # Tests with no annotation at all are implicitly treated as 'SmallTest'.
    if len(options.annotation) == 1 and options.annotation[0] == 'SmallTest':
      tests_without_annotation = [
          m for m in
          test_apk.GetTestMethods()
          if not test_apk.GetTestAnnotations(m) and
          not apk_info.ApkInfo.IsPythonDrivenTest(m)]
      if tests_without_annotation:
        tests_without_annotation.sort()
        logging.warning('The following tests do not contain any annotation. '
                        'Assuming "SmallTest":\n%s',
                        '\n'.join(tests_without_annotation))
        available_tests += tests_without_annotation
  else:
    available_tests = [m for m in test_apk.GetTestMethods()
                       if not apk_info.ApkInfo.IsPythonDrivenTest(m)]
  coverage = os.environ.get('EMMA_INSTRUMENT') == 'true'

  if options.test_filter:
    # |available_tests| are in adb instrument format: package.path.class#test.
    filter_without_hash = options.test_filter.replace('#', '.')
    tests = [t for t in available_tests
             if filter_without_hash in t.replace('#', '.')]
  else:
    tests = available_tests

  if not tests:
    logging.warning('No Java tests to run with current args.')
    return TestResults()

  # Repeat the full list when multiple runs were requested.
  tests *= options.number_of_runs

  attached_devices = android_commands.GetAttachedDevices()
  if not attached_devices:
    raise FatalTestException('You have no devices attached or visible!')
  if options.device:
    attached_devices = [options.device]

  logging.info('Will run: %s', str(tests))

  if len(attached_devices) > 1 and (coverage or options.wait_for_debugger):
    logging.warning('Coverage / debugger can not be sharded, '
                    'using first available device')
    attached_devices = attached_devices[:1]
  # Removed two dead stores from the original (an unused 'tests = []' and a
  # TestResults() that was unconditionally overwritten); return directly.
  sharder = TestSharder(attached_devices, options, tests, apks)
  return sharder.RunShardedTests()
diff --git a/media/webrtc/trunk/build/android/pylib/run_python_tests.py b/media/webrtc/trunk/build/android/pylib/run_python_tests.py
new file mode 100644
index 000000000..7d39f4829
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/run_python_tests.py
@@ -0,0 +1,207 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs the Python tests (relies on using the Java test runner)."""
+
+import logging
+import os
+import sys
+import types
+
+import android_commands
+import apk_info
+import constants
+import python_test_base
+from python_test_caller import CallPythonTest
+from python_test_sharder import PythonTestSharder
+import run_java_tests
+from run_java_tests import FatalTestException
+from test_info_collection import TestInfoCollection
+from test_result import TestResults
+
+
+def _GetPythonFiles(root, files):
+ """Returns all files from |files| that end in 'Test.py'.
+
+ Args:
+ root: A directory name with python files.
+ files: A list of file names.
+
+ Returns:
+ A list with all Python driven test file paths.
+ """
+ return [os.path.join(root, f) for f in files if f.endswith('Test.py')]
+
+
+def _InferImportNameFromFile(python_file):
+ """Given a file, infer the import name for that file.
+
+ Example: /usr/foo/bar/baz.py -> baz.
+
+ Args:
+ python_file: path to the Python file, ostensibly to import later.
+
+ Returns:
+ The module name for the given file.
+ """
+ return os.path.splitext(os.path.basename(python_file))[0]
+
+
def DispatchPythonTests(options):
  """Dispatches the Python tests. If there are multiple devices, use sharding.

  Args:
    options: command line options.

  Returns:
    A list of test results.

  Raises:
    FatalTestException: if no devices are attached.
  """

  attached_devices = android_commands.GetAttachedDevices()
  if not attached_devices:
    raise FatalTestException('You have no devices attached or visible!')
  if options.device:
    attached_devices = [options.device]

  test_collection = TestInfoCollection()
  all_tests = _GetAllTests(options.python_test_root, options.official_build)
  test_collection.AddTests(all_tests)
  test_names = [t.qualified_name for t in all_tests]
  logging.debug('All available tests: ' + str(test_names))

  available_tests = test_collection.GetAvailableTests(
      options.annotation, options.test_filter)

  if not available_tests:
    logging.warning('No Python tests to run with current args.')
    return TestResults()

  # Each requested run repeats the full test list.
  available_tests *= options.number_of_runs
  test_names = [t.qualified_name for t in available_tests]
  logging.debug('Final list of tests to run: ' + str(test_names))

  # Copy files to each device before running any tests.
  for device_id in attached_devices:
    logging.debug('Pushing files to device %s', device_id)
    apks = [apk_info.ApkInfo(options.test_apk_path, options.test_apk_jar_path)]
    # A throwaway Java TestRunner is used purely for its file-push logic.
    test_files_copier = run_java_tests.TestRunner(options, device_id,
                                                  None, False, 0, apks, [])
    test_files_copier.CopyTestFilesOnce()

  # Actually run the tests.
  if len(attached_devices) > 1 and options.wait_for_debugger:
    logging.warning('Debugger can not be sharded, '
                    'using first available device')
    attached_devices = attached_devices[:1]
  logging.debug('Running Python tests')
  sharder = PythonTestSharder(attached_devices, available_tests, options)
  test_results = sharder.RunShardedTests()

  return test_results
+
+
def _GetTestModules(python_test_root, is_official_build):
  """Retrieve a sorted list of pythonDrivenTests.

  Walks the location of pythonDrivenTests, imports them, and provides the list
  of imported modules to the caller.

  Args:
    python_test_root: the path to walk, looking for pythonDrivenTests.
    is_official_build: whether to also include tests marked 'official'.

  Returns:
    A list of Python modules which may have zero or more tests.
  """
  # By default run all python tests under pythonDrivenTests.
  test_file_paths = []
  for root, _, files in os.walk(python_test_root):
    is_driven_dir = root.endswith('pythonDrivenTests')
    is_official_dir = (is_official_build and
                       root.endswith('pythonDrivenTests/official'))
    if is_driven_dir or is_official_dir:
      test_file_paths += _GetPythonFiles(root, files)
  test_file_paths.sort()

  return [_GetModuleFromFile(test_file) for test_file in test_file_paths]
+
+
def _GetModuleFromFile(python_file):
  """Gets the module associated with a file by importing it.

  Args:
    python_file: file to import.

  Returns:
    The module object.
  """
  module_dir = os.path.dirname(python_file)
  # Only extend sys.path when needed; the original appended unconditionally,
  # growing sys.path with duplicates when many tests share a directory.
  if module_dir not in sys.path:
    sys.path.append(module_dir)
  return __import__(_InferImportNameFromFile(python_file))
+
+
def _GetTestsFromClass(test_class):
  """Create a list of test objects for each test method on this class.

  Test methods are methods on the class which begin with 'test'.

  Args:
    test_class: class object which contains zero or more test methods.

  Returns:
    A list of test objects, each of which is bound to one test.
  """
  method_names = [name for name in dir(test_class)
                  if _IsTestMethod(name, test_class)]
  # map() is kept (rather than a comprehension) to preserve the original
  # return type exactly.
  return map(test_class, method_names)
+
+
def _GetTestClassesFromModule(test_module):
  """Collects instantiated tests from every test class in |test_module|."""
  collected = []
  for attr_name in dir(test_module):
    candidate = getattr(test_module, attr_name)
    if _IsTestClass(candidate):
      collected.extend(_GetTestsFromClass(candidate))
  return collected
+
+
def _IsTestClass(candidate):
  """True if |candidate| is a concrete subclass of PythonTestBase."""
  # Check type first: issubclass() would raise on non-classes.
  if type(candidate) is not types.TypeType:
    return False
  if not issubclass(candidate, python_test_base.PythonTestBase):
    return False
  return candidate is not python_test_base.PythonTestBase
+
+
+def _IsTestMethod(attrname, test_case_class):
+ """Checks whether this is a valid test method.
+
+ Args:
+ attrname: the method name.
+ test_case_class: the test case class.
+
+ Returns:
+ True if test_case_class.'attrname' is callable and it starts with 'test';
+ False otherwise.
+ """
+ attr = getattr(test_case_class, attrname)
+ return callable(attr) and attrname.startswith('test')
+
+
def _GetAllTests(test_root, is_official_build):
  """Retrieve a list of Python test modules and their respective methods.

  Args:
    test_root: path which contains Python-driven test files.
    is_official_build: whether this is an official build.

  Returns:
    List of test case objects for all available test methods.
  """
  if not test_root:
    return []
  collected_tests = []
  for module in _GetTestModules(test_root, is_official_build):
    collected_tests.extend(_GetTestClassesFromModule(module))
  return collected_tests
diff --git a/media/webrtc/trunk/build/android/pylib/run_tests_helper.py b/media/webrtc/trunk/build/android/pylib/run_tests_helper.py
new file mode 100644
index 000000000..15e5d5381
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/run_tests_helper.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions common to native, java and python test runners."""
+
+import logging
+import os
+
+
def GetExpectations(file_name):
  """Returns a list of test names in the |file_name| test expectations file.

  Blank lines and lines starting with '#' (comments) are skipped; remaining
  lines are stripped of surrounding whitespace.

  Args:
    file_name: path to the expectations file, or None/empty.

  Returns:
    A list of test name strings; empty if the file is unset or missing.
  """
  if not file_name or not os.path.exists(file_name):
    return []
  # The original used the Python-2-only file() builtin and never closed the
  # handle; open() in a with-block fixes both.
  with open(file_name) as f:
    stripped_lines = [line.strip() for line in f]
  return [line for line in stripped_lines if line and line[0] != '#']
+
+
def SetLogLevel(verbose_count):
  """Sets the root logger's level from a -v count (0=WARNING, 1=INFO, 2+=DEBUG)."""
  if verbose_count >= 2:
    log_level = logging.DEBUG
  elif verbose_count == 1:
    log_level = logging.INFO
  else:
    log_level = logging.WARNING  # Default.
  logging.getLogger().setLevel(log_level)
diff --git a/media/webrtc/trunk/build/android/pylib/sharded_tests_queue.py b/media/webrtc/trunk/build/android/pylib/sharded_tests_queue.py
new file mode 100644
index 000000000..9e28e2c48
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/sharded_tests_queue.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A module that contains a queue for running sharded tests."""
+
+import multiprocessing
+
+
class ShardedTestsQueue(object):
  """A queue for managing pending tests across different runners.

  This class should only be used when sharding.

  Attributes:
    num_devices: an integer; the number of attached Android devices.
    tests_queue: a multiprocessing.Queue holding the pending tests followed
      by one stop sentinel per device.
  """
  _STOP_SENTINEL = 'STOP'  # sentinel value for iter()

  def __init__(self, num_devices, tests):
    """Fills the queue with |tests| plus one stop sentinel per device.

    Args:
      num_devices: number of consumers (one per attached device).
      tests: iterable of tests to distribute.
    """
    self.num_devices = num_devices
    self.tests_queue = multiprocessing.Queue()
    for test in tests:
      self.tests_queue.put(test)
    # Each consumer stops at the first sentinel it sees, so enqueue exactly
    # one per device. range() replaces the Python-2-only xrange(), keeping
    # the module importable on both Python 2 and 3.
    for _ in range(self.num_devices):
      self.tests_queue.put(ShardedTestsQueue._STOP_SENTINEL)

  def __iter__(self):
    """Returns an iterator yielding tests until a stop sentinel is seen."""
    return iter(self.tests_queue.get, ShardedTestsQueue._STOP_SENTINEL)
diff --git a/media/webrtc/trunk/build/android/pylib/single_test_runner.py b/media/webrtc/trunk/build/android/pylib/single_test_runner.py
new file mode 100644
index 000000000..a680c68f1
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/single_test_runner.py
@@ -0,0 +1,343 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import logging
+import os
+import sys
+
+from base_test_runner import BaseTestRunner
+import debug_info
+import constants
+import perf_tests_helper
+import run_tests_helper
+from test_package_apk import TestPackageApk
+from test_package_executable import TestPackageExecutable
+from test_result import TestResults
+
+
class SingleTestRunner(BaseTestRunner):
  """Runs a single gtest suite on a single attached device.

  Args:
    device: Device to run the tests.
    test_suite: A specific test suite to run, empty to run all.
    gtest_filter: A gtest_filter flag.
    test_arguments: Additional arguments to pass to the test binary.
    timeout: Timeout for each test.
    rebaseline: Whether or not to run tests in isolation and update the filter.
    performance_test: Whether or not this is a performance test run.
    cleanup_test_files: Whether or not to cleanup test files on device.
    tool_name: Name of the Valgrind tool.
    shard_index: index number of the shard on which the test suite will run.
    dump_debug_info: Whether or not to dump debug information.
    fast_and_loose: If True, skip pushing test data files to the device
        (see StripAndCopyFiles).
    build_type: 'Release' or 'Debug'.
  """

  def __init__(self, device, test_suite, gtest_filter, test_arguments, timeout,
               rebaseline, performance_test, cleanup_test_files, tool_name,
               shard_index, dump_debug_info, fast_and_loose, build_type):
    BaseTestRunner.__init__(self, device, tool_name, shard_index, build_type)
    # Serials of the form 'emulator-...' get an extra disabled-test filter.
    self._running_on_emulator = self.device.startswith('emulator')
    self._gtest_filter = gtest_filter
    self._test_arguments = test_arguments
    self.test_results = TestResults()
    if dump_debug_info:
      self.dump_debug_info = debug_info.GTestDebugInfo(self.adb, device,
          os.path.basename(test_suite), gtest_filter)
    else:
      self.dump_debug_info = None
    self.fast_and_loose = fast_and_loose

    logging.warning('Test suite: ' + test_suite)
    # An .apk suite runs inside an APK wrapper; anything else is treated as
    # a stand-alone executable.
    if os.path.splitext(test_suite)[1] == '.apk':
      self.test_package = TestPackageApk(self.adb, device,
          test_suite, timeout, rebaseline, performance_test, cleanup_test_files,
          self.tool, self.dump_debug_info)
    else:
      self.test_package = TestPackageExecutable(
          self.adb, device,
          test_suite, timeout, rebaseline, performance_test, cleanup_test_files,
          self.tool, self.dump_debug_info)
    self._performance_test_setup = None
    if performance_test:
      self._performance_test_setup = perf_tests_helper.PerfTestSetup(self.adb)

  def _TestSuiteRequiresMockTestServer(self):
    """Returns True if the test suite requires mock test server."""
    return False
    # TODO(yfriedman): Disabled because of flakiness.
    # (self.test_package.test_suite_basename == 'unit_tests' or
    #  self.test_package.test_suite_basename == 'net_unittests' or
    #  False)

  def _GetFilterFileName(self):
    """Returns the filename of the gtest filter (the disabled-tests list)."""
    return os.path.join(sys.path[0], 'gtest_filter',
                        self.test_package.test_suite_basename + '_disabled')

  def _GetAdditionalEmulatorFilterName(self):
    """Returns the filename of the additional gtest filter for emulators."""
    return os.path.join(sys.path[0], 'gtest_filter',
                        self.test_package.test_suite_basename +
                        '_emulator_additional_disabled')

  def GetDisabledTests(self):
    """Returns a list of disabled tests.

    Returns:
      A list of disabled tests obtained from gtest_filter/test_suite_disabled
      (plus the emulator-specific list when running on an emulator).
    """
    disabled_tests = run_tests_helper.GetExpectations(self._GetFilterFileName())
    if self._running_on_emulator:
      # Append emulator's filter file.
      disabled_tests.extend(run_tests_helper.GetExpectations(
          self._GetAdditionalEmulatorFilterName()))
    return disabled_tests

  def UpdateFilter(self, failed_tests):
    """Updates test_suite_disabled file with the new filter (deletes if empty).

    If running in Emulator, only the failed tests which are not in the normal
    filter returned by _GetFilterFileName() are written to emulator's
    additional filter file.

    Args:
      failed_tests: A sorted list of failed tests.
    """
    disabled_tests = []
    if not self._running_on_emulator:
      filter_file_name = self._GetFilterFileName()
    else:
      filter_file_name = self._GetAdditionalEmulatorFilterName()
      # Tests already disabled by the normal filter must not be duplicated
      # into the emulator-only filter below.
      disabled_tests.extend(
          run_tests_helper.GetExpectations(self._GetFilterFileName()))
      logging.info('About to update emulator\'s additional filter (%s).'
                   % filter_file_name)

    # Keep only failures not already covered by the base filter.
    new_failed_tests = []
    if failed_tests:
      for test in failed_tests:
        if test.name not in disabled_tests:
          new_failed_tests.append(test.name)

    # No remaining failures: remove the filter file entirely.
    if not new_failed_tests:
      if os.path.exists(filter_file_name):
        os.unlink(filter_file_name)
      return

    # NOTE(review): file() is a Python-2-only builtin; open() would be needed
    # under Python 3. The "Addtional" typo below is in a generated-file header
    # string (runtime output), so it is preserved as-is here.
    filter_file = file(filter_file_name, 'w')
    if self._running_on_emulator:
      filter_file.write('# Addtional list of suppressions from emulator\n')
    else:
      filter_file.write('# List of suppressions\n')
    filter_file.write('# This file was automatically generated by %s\n'
                      % sys.argv[0])
    filter_file.write('\n'.join(sorted(new_failed_tests)))
    filter_file.write('\n')
    filter_file.close()

  def GetDataFilesForTestSuite(self):
    """Returns a list of data files/dirs needed by the test suite."""
    # Ideally, we'd just push all test data. However, it has >100MB, and a lot
    # of the files are not relevant (some are used for browser_tests, others for
    # features not supported, etc..).
    if self.test_package.test_suite_basename in ['base_unittests',
                                                 'sql_unittests',
                                                 'unit_tests']:
      test_files = [
          'base/data/file_util_unittest',
          'base/data/json/bom_feff.json',
          'chrome/test/data/download-test1.lib',
          'chrome/test/data/extensions/bad_magic.crx',
          'chrome/test/data/extensions/good.crx',
          'chrome/test/data/extensions/icon1.png',
          'chrome/test/data/extensions/icon2.png',
          'chrome/test/data/extensions/icon3.png',
          'chrome/test/data/extensions/allow_silent_upgrade/',
          'chrome/test/data/extensions/app/',
          'chrome/test/data/extensions/bad/',
          'chrome/test/data/extensions/effective_host_permissions/',
          'chrome/test/data/extensions/empty_manifest/',
          'chrome/test/data/extensions/good/Extensions/',
          'chrome/test/data/extensions/manifest_tests/',
          'chrome/test/data/extensions/page_action/',
          'chrome/test/data/extensions/permissions/',
          'chrome/test/data/extensions/script_and_capture/',
          'chrome/test/data/extensions/unpacker/',
          'chrome/test/data/bookmarks/',
          'chrome/test/data/components/',
          'chrome/test/data/extensions/json_schema_test.js',
          'chrome/test/data/History/',
          'chrome/test/data/json_schema_validator/',
          'chrome/test/data/pref_service/',
          'chrome/test/data/serializer_nested_test.js',
          'chrome/test/data/serializer_test.js',
          'chrome/test/data/serializer_test_nowhitespace.js',
          'chrome/test/data/top_sites/',
          'chrome/test/data/web_app_info/',
          'chrome/test/data/web_database',
          'chrome/test/data/webui/',
          'chrome/test/data/zip',
          'chrome/third_party/mock4js/',
          'content/browser/gpu/software_rendering_list.json',
          'net/data/cache_tests/insert_load1',
          'net/data/cache_tests/dirty_entry5',
          'net/data/ssl/certificates/',
          'ui/base/test/data/data_pack_unittest',
      ]
      if self.test_package.test_suite_basename == 'unit_tests':
        test_files += ['chrome/test/data/simple_open_search.xml']
        # The following are spell check data. Now only list the data under
        # third_party/hunspell_dictionaries which are used by unit tests.
        old_cwd = os.getcwd()
        os.chdir(constants.CHROME_DIR)
        test_files += glob.glob('third_party/hunspell_dictionaries/*.bdic')
        os.chdir(old_cwd)
      return test_files
    elif self.test_package.test_suite_basename == 'net_unittests':
      return [
          'net/data/cache_tests',
          'net/data/filter_unittests',
          'net/data/ftp',
          'net/data/proxy_resolver_v8_unittest',
          'net/data/ssl/certificates',
          'net/data/url_request_unittest/',
          'net/data/proxy_script_fetcher_unittest'
      ]
    elif self.test_package.test_suite_basename == 'ui_tests':
      return [
          'chrome/test/data/dromaeo',
          'chrome/test/data/json2.js',
          'chrome/test/data/sunspider',
          'chrome/test/data/v8_benchmark',
          'chrome/test/perf/sunspider_uitest.js',
          'chrome/test/perf/v8_benchmark_uitest.js',
      ]
    elif self.test_package.test_suite_basename == 'page_cycler_tests':
      data = [
          'tools/page_cycler',
          'data/page_cycler',
      ]
      for d in data:
        if not os.path.exists(d):
          raise Exception('Page cycler data not found.')
      return data
    elif self.test_package.test_suite_basename == 'webkit_unit_tests':
      return [
          'third_party/WebKit/Source/WebKit/chromium/tests/data',
      ]
    elif self.test_package.test_suite_basename == 'content_unittests':
      return [
          'content/test/data/gpu/webgl_conformance_test_expectations.txt',
          'net/data/ssl/certificates/',
          'webkit/data/dom_storage/webcore_test_database.localstorage',
          'third_party/hyphen/hyph_en_US.dic',
      ]
    elif self.test_package.test_suite_basename == 'media_unittests':
      return [
          'media/test/data',
      ]
    return []

  def LaunchHelperToolsForTestSuite(self):
    """Launches helper tools for the test suite.

    Sometimes one test may need to run some helper tools first in order to
    successfully complete the test.
    """
    if self._TestSuiteRequiresMockTestServer():
      self.LaunchChromeTestServerSpawner()

  def StripAndCopyFiles(self):
    """Strips and copies the required data files for the test suite."""
    self.test_package.StripAndCopyExecutable()
    self.test_package.PushDataAndPakFiles()
    self.tool.CopyFiles()
    test_data = self.GetDataFilesForTestSuite()
    # fast_and_loose skips the data push (and the SD-card wait) entirely.
    if test_data and not self.fast_and_loose:
      # Make sure SD card is ready.
      self.adb.WaitForSdCardReady(20)
      for data in test_data:
        self.CopyTestData([data], self.adb.GetExternalStorage())

  def RunTestsWithFilter(self):
    """Runs the tests via a small, temporary shell script.

    Stores the outcome in self.test_results.
    """
    self.test_package.CreateTestRunnerScript(self._gtest_filter,
                                             self._test_arguments)
    self.test_results = self.test_package.RunTestsAndListResults()

  def RebaselineTests(self):
    """Runs all available tests, restarting in case of failures.

    Repeatedly re-runs with a filter excluding already-executed tests until
    every test has run once; crashes therefore only abort one pass, not the
    whole rebaseline.
    """
    if self._gtest_filter:
      all_tests = set(self._gtest_filter.split(':'))
    else:
      all_tests = set(self.test_package.GetAllTests())
    failed_results = set()
    executed_results = set()
    while True:
      # Restrict the next run to the tests not yet executed.
      executed_names = set([f.name for f in executed_results])
      self._gtest_filter = ':'.join(all_tests - executed_names)
      self.RunTestsWithFilter()
      failed_results.update(self.test_results.crashed,
                            self.test_results.failed)
      executed_results.update(self.test_results.crashed,
                              self.test_results.failed,
                              self.test_results.ok)
      executed_names = set([f.name for f in executed_results])
      logging.info('*' * 80)
      logging.info(self.device)
      logging.info('Executed: ' + str(len(executed_names)) + ' of ' +
                   str(len(all_tests)))
      logging.info('Failed so far: ' + str(len(failed_results)) + ' ' +
                   str([f.name for f in failed_results]))
      logging.info('Remaining: ' + str(len(all_tests - executed_names)) + ' ' +
                   str(all_tests - executed_names))
      logging.info('*' * 80)
      if executed_names == all_tests:
        break
    self.test_results = TestResults.FromRun(
        ok=list(executed_results - failed_results),
        failed=list(failed_results))

  def RunTests(self):
    """Runs all tests (in rebaseline mode, runs each test in isolation).

    Returns:
      A TestResults object.
    """
    if self.test_package.rebaseline:
      self.RebaselineTests()
    else:
      if not self._gtest_filter:
        # Default filter: exclude known-disabled tests and any test whose
        # name carries a disabled prefix (DISABLED_, FLAKY_, ...).
        self._gtest_filter = ('-' + ':'.join(self.GetDisabledTests()) + ':' +
                              ':'.join(['*.' + x + '*' for x in
                                        self.test_package.GetDisabledPrefixes()]))
      self.RunTestsWithFilter()
    return self.test_results

  def SetUp(self):
    """Sets up the necessary test environment for the test suite."""
    super(SingleTestRunner, self).SetUp()
    self.adb.ClearApplicationState(constants.CHROME_PACKAGE)
    if self._performance_test_setup:
      self._performance_test_setup.SetUp()
    if self.dump_debug_info:
      self.dump_debug_info.StartRecordingLog(True)
    self.StripAndCopyFiles()
    self.LaunchHelperToolsForTestSuite()
    self.tool.SetupEnvironment()

  def TearDown(self):
    """Cleans up the test environment for the test suite."""
    self.tool.CleanUpEnvironment()
    if self.test_package.cleanup_test_files:
      self.adb.RemovePushedFiles()
    if self.dump_debug_info:
      self.dump_debug_info.StopRecordingLog()
    if self._performance_test_setup:
      self._performance_test_setup.TearDown()
    if self.dump_debug_info:
      self.dump_debug_info.ArchiveNewCrashFiles()
    super(SingleTestRunner, self).TearDown()
diff --git a/media/webrtc/trunk/build/android/pylib/test_info_collection.py b/media/webrtc/trunk/build/android/pylib/test_info_collection.py
new file mode 100644
index 000000000..fc4e80694
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/test_info_collection.py
@@ -0,0 +1,137 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing information about the python-driven tests."""
+
+import logging
+import os
+
+import tests_annotations
+
+
class TestInfo(object):
  """Wraps a test callable together with its metadata."""

  def __init__(self, runnable, set_up=None, tear_down=None):
    # The actual test function/method.
    self.runnable = runnable
    # Qualified name of test function/method (e.g. FooModule.testBar).
    self.qualified_name = self._GetQualifiedName(runnable)
    # Optional setUp and tearDown callables, if any.
    self.set_up = set_up
    self.tear_down = tear_down

  def _GetQualifiedName(self, runnable):
    """Infers 'module_name.method_name' for a runnable via reflection.

    Many filters and lists presuppose a format of module_name.testMethodName,
    so the defining module's file name is combined with the callable's name.

    Args:
      runnable: the test method to get the qualified name for

    Returns:
      qualified name for this runnable, incl. module name and method name.
    """
    source_file = runnable.__globals__['__file__']
    module_name = os.path.splitext(os.path.basename(source_file))[0]
    return '%s.%s' % (module_name, runnable.__name__)

  def __str__(self):
    return self.qualified_name
+
+
class TestInfoCollection(object):
  """A collection of TestInfo objects which facilitates filtering."""

  def __init__(self):
    """Initialize a new TestInfoCollection."""
    # Master list of all valid tests.
    self.all_tests = []

  def AddTests(self, test_infos):
    """Adds a set of tests to this collection.

    The user may then retrieve them, optionally according to criteria, via
    GetAvailableTests().

    Args:
      test_infos: a list of TestInfos representing test functions/methods.
    """
    self.all_tests = test_infos

  def GetAvailableTests(self, annotation, name_filter):
    """Get a collection of TestInfos which match the supplied criteria.

    Args:
      annotation: annotation which tests must match, if any
      name_filter: name filter which tests must match, if any

    Returns:
      List of available tests.
    """
    # First narrow by annotation, then (below) by name.
    matched = [t for t in self.all_tests
               if self._AnnotationIncludesTest(t, annotation)]
    # Tests carrying no annotation at all are implicitly "SmallTest".
    if annotation and len(annotation) == 1 and annotation[0] == 'SmallTest':
      unannotated = [
          t for t in self.all_tests
          if not tests_annotations.AnnotatedFunctions.GetTestAnnotations(
              t.qualified_name)]
      logging.warning('The following tests do not contain any annotation. '
                      'Assuming "SmallTest":\n%s',
                      '\n'.join([t.qualified_name for t in unannotated]))
      matched += unannotated
    return [t for t in matched
            if self._NameFilterIncludesTest(t, name_filter)]

  def _AnnotationIncludesTest(self, test_info, annotation_filter_list):
    """Checks whether a given test represented by test_info matches annotation.

    Args:
      test_info: TestInfo object representing the test
      annotation_filter_list: list of annotation filters to match (e.g. Smoke)

    Returns:
      True if no annotation was supplied or the test matches; false otherwise.
    """
    if not annotation_filter_list:
      return True
    for annotation_filter in annotation_filter_list:
      key_value = annotation_filter.split('=')
      if len(key_value) == 2:
        # 'Key=v1,v2' form: match if any listed value is annotated.
        key, values = key_value
        for value in values.split(','):
          if tests_annotations.AnnotatedFunctions.IsAnnotated(
              key + ':' + value, test_info.qualified_name):
            return True
      elif tests_annotations.AnnotatedFunctions.IsAnnotated(
          annotation_filter, test_info.qualified_name):
        return True
    return False

  def _NameFilterIncludesTest(self, test_info, name_filter):
    """Checks whether a name filter matches a given test_info's method name.

    This is a case-sensitive, substring comparison: 'Foo' will match methods
    Foo.testBar and Bar.testFoo. 'foo' would not match either.

    Args:
      test_info: TestInfo object representing the test
      name_filter: substring to check for in the qualified name of the test

    Returns:
      True if no name filter supplied or it matches; False otherwise.
    """
    if not name_filter:
      return True
    return name_filter in test_info.qualified_name
diff --git a/media/webrtc/trunk/build/android/pylib/test_options_parser.py b/media/webrtc/trunk/build/android/pylib/test_options_parser.py
new file mode 100644
index 000000000..ee00f1fff
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/test_options_parser.py
@@ -0,0 +1,143 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Parses options for the instrumentation tests."""
+
+import constants
+import optparse
+import os
+import sys
+
# Root of the SDK build output tree ('out' under the Chromium checkout);
# APK and jar paths in ValidateInstrumentationOptions are joined onto this.
_SDK_OUT_DIR = os.path.join(constants.CHROME_DIR, 'out')
+
+
def AddBuildTypeOption(option_parser):
  """Decorates OptionParser with --debug/--release build type options.

  The default build type comes from the BUILDTYPE environment variable,
  falling back to 'Debug' when it is unset.
  """
  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
  option_parser.add_option(
      '--debug', action='store_const', const='Debug',
      dest='build_type', default=default_build_type,
      help='If set, run test suites under out/Debug. '
           'Default is env var BUILDTYPE or Debug')
  option_parser.add_option(
      '--release', action='store_const', const='Release', dest='build_type',
      help='If set, run test suites under out/Release. '
           'Default is env var BUILDTYPE or Debug.')
+
def AddInstallAPKOption(option_parser):
  """Decorates OptionParser with the options used to install an APK."""
  apk_help = ('The name of the apk containing the '
              ' application (with the .apk extension).')
  package_help = ('The package name used by the apk containing '
                  'the application.')
  option_parser.add_option('--apk', help=apk_help)
  option_parser.add_option('--apk_package', help=package_help)
+
def AddTestRunnerOptions(option_parser, default_timeout=60):
  """Decorates OptionParser with options applicable to all tests."""
  option_parser.add_option(
      '-t', dest='timeout', type='int', default=default_timeout,
      help='Timeout to wait for each test')
  option_parser.add_option(
      '-c', dest='cleanup_test_files', action='store_true',
      help='Cleanup test files on the device after run')
  option_parser.add_option(
      '-v', '--verbose', dest='verbose_count', default=0, action='count',
      help='Verbose level (multiple times for more)')
  profilers = ['devicestatsmonitor', 'chrometrace', 'dumpheap', 'smaps',
               'traceview']
  option_parser.add_option(
      '--profiler', dest='profilers', action='append', choices=profilers,
      help='Profiling tool to run during test. '
           'Pass multiple times to run multiple profilers. '
           'Available profilers: %s' % profilers)
  option_parser.add_option(
      '--tool', dest='tool',
      help='Run the test under a tool '
           '(use --tool help to list them)')
  # Shared --debug/--release handling.
  AddBuildTypeOption(option_parser)
+
+
def AddInstrumentationOptions(option_parser):
  """Decorates OptionParser with instrumentation tests options."""
  # Instrumentation tests take all of the common runner options too.
  AddTestRunnerOptions(option_parser)
  option_parser.add_option(
      '-w', '--wait_debugger', dest='wait_for_debugger', action='store_true',
      help='Wait for debugger.')
  option_parser.add_option(
      '-I', dest='install_apk', action='store_true', help='Install APK.')
  option_parser.add_option(
      '-f', '--test_filter',
      help='Test filter (if not fully qualified, will run all matches).')
  option_parser.add_option(
      '-A', '--annotation', dest='annotation_str',
      help=('Run only tests with any of the given annotations. '
            'An annotation can be either a key or a key-values pair. '
            'A test that has no annotation is considered "SmallTest".'))
  option_parser.add_option(
      '-j', '--java_only', action='store_true',
      help='Run only the Java tests.')
  option_parser.add_option(
      '-p', '--python_only', action='store_true',
      help='Run only the Python tests.')
  option_parser.add_option(
      '-n', '--run_count', type='int', dest='number_of_runs', default=1,
      help=('How many times to run each test, regardless of the result. '
            '(Default is 1)'))
  option_parser.add_option(
      '--test-apk', dest='test_apk',
      help=('The name of the apk containing the tests (without the .apk '
            'extension). For SDK builds, the apk name without the debug '
            'suffix(for example, ContentShellTest).'))
  option_parser.add_option(
      '--screenshot', dest='screenshot_failures', action='store_true',
      help='Capture screenshots of test failures')
  option_parser.add_option(
      '--save-perf-json', action='store_true',
      help='Saves the JSON file for each UI Perf test.')
  option_parser.add_option(
      '--shard_retries', type=int, default=1,
      help=('Number of times to retry each failure when sharding.'))
  option_parser.add_option('--official-build', help='Run official build tests.')
  option_parser.add_option(
      '--device', help='Serial number of device we should use.')
  option_parser.add_option(
      '--python_test_root', help='Root of the python-driven tests.')
+
def ValidateInstrumentationOptions(option_parser, options, args):
  """Validate options/arguments and populate options with defaults.

  Exits via option_parser.error() on invalid input; otherwise fills in
  run_java_tests/run_python_tests, the APK/jar paths, and the annotation
  list on |options|.
  """
  if len(args) > 1:
    option_parser.print_help(sys.stderr)
    option_parser.error('Unknown arguments: %s' % args[1:])
  if options.java_only and options.python_only:
    option_parser.error('Options java_only (-j) and python_only (-p) '
                        'are mutually exclusive.')

  # Default: run both; -j / -p narrow to one side.
  options.run_java_tests = True
  options.run_python_tests = True
  if options.java_only:
    options.run_python_tests = False
  elif options.python_only:
    options.run_java_tests = False

  # In case of SDK Build, the jars and apks have a -debug suffix.
  build_dir = os.path.join(_SDK_OUT_DIR, options.build_type)
  options.test_apk_path = os.path.join(build_dir,
                                       constants.SDK_BUILD_APKS_DIR,
                                       '%s-debug.apk' % options.test_apk)
  options.test_apk_jar_path = os.path.join(
      build_dir, constants.SDK_BUILD_TEST_JAVALIB_DIR,
      '%s-debug.jar' % options.test_apk)

  # Annotation defaults: explicit -A wins; an explicit -f disables the
  # implicit annotation set; otherwise run the standard size buckets.
  if options.annotation_str:
    options.annotation = options.annotation_str.split()
  elif options.test_filter:
    options.annotation = []
  else:
    options.annotation = ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest']
diff --git a/media/webrtc/trunk/build/android/pylib/test_package.py b/media/webrtc/trunk/build/android/pylib/test_package.py
new file mode 100644
index 000000000..a47ed72d8
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/test_package.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import re
+import os
+
+import constants
+from perf_tests_helper import PrintPerfResult
+from pylib import pexpect
+from test_result import BaseTestResult, TestResults
+
+
+# TODO(bulach): TestPackage, TestPackageExecutable and
+# TestPackageApk are a work in progress related to making the native tests
+# run as a NDK-app from an APK rather than a stand-alone executable.
# TODO(bulach): TestPackage, TestPackageExecutable and
# TestPackageApk are a work in progress related to making the native tests
# run as a NDK-app from an APK rather than a stand-alone executable.
class TestPackage(object):
  """A helper base class for both APK and stand-alone executables.

  Args:
    adb: ADB interface the tests are using.
    device: Device to run the tests.
    test_suite: A specific test suite to run, empty to run all.
    timeout: Timeout for each test.
    rebaseline: Whether or not to run tests in isolation and update the filter.
    performance_test: Whether or not this is a performance test run.
    cleanup_test_files: Whether or not to cleanup test files on device.
    tool: Name of the Valgrind tool.
    dump_debug_info: A debug_info object.
  """

  def __init__(self, adb, device, test_suite, timeout, rebaseline,
               performance_test, cleanup_test_files, tool, dump_debug_info):
    self.adb = adb
    self.device = device
    # Full path with extension; self.test_suite is the same path without it.
    self.test_suite_full = test_suite
    self.test_suite = os.path.splitext(test_suite)[0]
    # NOTE(review): _GetTestSuiteBaseName is presumably supplied by the
    # APK/executable subclasses — confirm against those classes.
    self.test_suite_basename = self._GetTestSuiteBaseName()
    self.test_suite_dirname = os.path.dirname(
        self.test_suite.split(self.test_suite_basename)[0])
    self.rebaseline = rebaseline
    self.performance_test = performance_test
    self.cleanup_test_files = cleanup_test_files
    self.tool = tool
    if timeout == 0:
      timeout = 60
    # On a VM (e.g. chromium buildbots), this timeout is way too small.
    if os.environ.get('BUILDBOT_SLAVENAME'):
      timeout = timeout * 2
    # Slow tools (e.g. Valgrind) stretch the per-test timeout accordingly.
    self.timeout = timeout * self.tool.GetTimeoutScale()
    self.dump_debug_info = dump_debug_info

  def _BeginGetIOStats(self):
    """Gets I/O statistics before running test.

    Returns:
      I/O stats object. The I/O stats object may be None if this is not a
      performance test (or a rebaseline run).
    """
    initial_io_stats = None
    # Try to get the disk I/O statistics for all performance tests.
    if self.performance_test and not self.rebaseline:
      initial_io_stats = self.adb.GetIoStats()
    return initial_io_stats

  def _EndGetIOStats(self, initial_io_stats):
    """Gets I/O statistics after running test and calculates the I/O delta.

    Args:
      initial_io_stats: I/O stats object from _BeginGetIOStats.

    Returns:
      String of formatted disk I/O statistics (empty when not applicable).
    """
    disk_io = ''
    if self.performance_test and initial_io_stats:
      final_io_stats = self.adb.GetIoStats()
      for stat in final_io_stats:
        disk_io += '\n' + PrintPerfResult(stat, stat,
                                          [final_io_stats[stat] -
                                           initial_io_stats[stat]],
                                          stat.split('_')[1],
                                          print_to_stdout=False)
      logging.info(disk_io)
    return disk_io

  def GetDisabledPrefixes(self):
    """Returns gtest name prefixes that mark a test as disabled."""
    return ['DISABLED_', 'FLAKY_', 'FAILS_']

  def _ParseGTestListTests(self, all_tests):
    """Parses 'gtest --gtest_list_tests' output into full test names.

    Args:
      all_tests: list of raw output lines; case lines end with '.' and
          test lines are indented beneath them.

    Returns:
      List of 'Case.Test' names, excluding disabled-prefixed tests.
    """
    ret = []
    current = ''
    disabled_prefixes = self.GetDisabledPrefixes()
    for test in all_tests:
      if not test:
        continue
      if test[0] != ' ' and not test.endswith('.'):
        # Ignore any lines with unexpected format.
        continue
      if test[0] != ' ' and test.endswith('.'):
        # A new test case (fixture) name; subsequent indented lines belong
        # to it.
        current = test
        continue
      if 'YOU HAVE' in test:
        # gtest's trailing disabled-tests summary; nothing more to parse.
        break
      test_name = test[2:]
      if not any([test_name.startswith(x) for x in disabled_prefixes]):
        ret += [current + test_name]
    return ret

  def PushDataAndPakFiles(self):
    """Pushes the .pak resource files some suites need to external storage."""
    external_storage = self.adb.GetExternalStorage()
    if (self.test_suite_basename == 'ui_unittests' or
        self.test_suite_basename == 'unit_tests'):
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/chrome.pak',
          external_storage + '/paks/chrome.pak')
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/locales/en-US.pak',
          external_storage + '/paks/en-US.pak')
      if self.test_suite_basename == 'unit_tests':
        self.adb.PushIfNeeded(
            self.test_suite_dirname + '/resources.pak',
            external_storage + '/paks/resources.pak')
        self.adb.PushIfNeeded(
            self.test_suite_dirname + '/chrome_100_percent.pak',
            external_storage + '/paks/chrome_100_percent.pak')
        self.adb.PushIfNeeded(self.test_suite_dirname + '/test_data',
                              external_storage + '/test_data')
    if self.test_suite_basename == 'content_unittests':
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/content_resources.pak',
          external_storage + '/paks/content_resources.pak')

  def _WatchTestOutput(self, p):
    """Watches the test output, classifying each test as it finishes.

    Args:
      p: the process generating output as created by pexpect.spawn.

    Returns:
      A TestResults object summarizing ok/failed/crashed tests plus
      timeout/overall-failure flags.
    """
    ok_tests = []
    failed_tests = []
    crashed_tests = []
    timed_out = False
    overall_fail = False
    re_run = re.compile('\[ RUN \] ?(.*)\r\n')
    # APK tests rely on the PASSED tag.
    re_passed = re.compile('\[ PASSED \] ?(.*)\r\n')
    # Signal handlers are installed before starting tests
    # to output the CRASHED marker when a crash happens.
    re_crash = re.compile('\[ CRASHED \](.*)\r\n')
    re_fail = re.compile('\[ FAILED \] ?(.*)\r\n')
    re_runner_fail = re.compile('\[ RUNNER_FAILED \] ?(.*)\r\n')
    re_ok = re.compile('\[ OK \] ?(.*?) .*\r\n')
    io_stats_before = self._BeginGetIOStats()
    try:
      while True:
        # Wait for the next test to start, or for the run to end.
        found = p.expect([re_run, re_passed, re_runner_fail],
                         timeout=self.timeout)
        if found == 1:  # matched PASSED.
          break
        if found == 2:  # RUNNER_FAILED
          logging.error('RUNNER_FAILED')
          overall_fail = True
          break
        if self.dump_debug_info:
          self.dump_debug_info.TakeScreenshot('_Test_Start_Run_')
        full_test_name = p.match.group(1).replace('\r', '')
        # Now wait for this test's own outcome marker.
        found = p.expect([re_ok, re_fail, re_crash], timeout=self.timeout)
        if found == 0:  # re_ok
          # Only count OK if the name matches the test that just started.
          if full_test_name == p.match.group(1).replace('\r', ''):
            ok_tests += [BaseTestResult(full_test_name, p.before)]
          continue
        if found == 2:  # re_crash
          crashed_tests += [BaseTestResult(full_test_name, p.before)]
          overall_fail = True
          break
        # The test failed.
        failed_tests += [BaseTestResult(full_test_name, p.before)]
    except pexpect.EOF:
      logging.error('Test terminated - EOF')
    except pexpect.TIMEOUT:
      logging.error('Test terminated after %d second timeout.',
                    self.timeout)
      timed_out = True
    finally:
      p.close()
    if not self.rebaseline:
      # NOTE(review): _EndGetIOStats returns a string, so += extends the list
      # with its individual characters rather than appending one entry —
      # looks unintended; confirm against TestResults consumers.
      ok_tests += self._EndGetIOStats(io_stats_before)
      ret_code = self._GetGTestReturnCode()
      if ret_code:
        failed_tests += [BaseTestResult('gtest exit code: %d' % ret_code,
                                        'pexpect.before: %s'
                                        '\npexpect.after: %s'
                                        % (p.before,
                                           p.after))]
    # Create TestResults and return
    return TestResults.FromRun(ok=ok_tests, failed=failed_tests,
                               crashed=crashed_tests, timed_out=timed_out,
                               overall_fail=overall_fail)
diff --git a/media/webrtc/trunk/build/android/pylib/test_package_apk.py b/media/webrtc/trunk/build/android/pylib/test_package_apk.py
new file mode 100644
index 000000000..42b9ade68
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/test_package_apk.py
@@ -0,0 +1,121 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import shlex
+import sys
+import tempfile
+import time
+
+import android_commands
+import constants
+from test_package import TestPackage
+from pylib import pexpect
+
+class TestPackageApk(TestPackage):
+ """A helper class for running APK-based native tests.
+
+ Args:
+ adb: ADB interface the tests are using.
+ device: Device to run the tests.
+ test_suite: A specific test suite to run, empty to run all.
+ timeout: Timeout for each test.
+ rebaseline: Whether or not to run tests in isolation and update the filter.
+    performance_test: Whether or not to run performance test(s).
+ cleanup_test_files: Whether or not to cleanup test files on device.
+ tool: Name of the Valgrind tool.
+ dump_debug_info: A debug_info object.
+ """
+
+ def __init__(self, adb, device, test_suite, timeout, rebaseline,
+ performance_test, cleanup_test_files, tool,
+ dump_debug_info):
+ TestPackage.__init__(self, adb, device, test_suite, timeout,
+ rebaseline, performance_test, cleanup_test_files,
+ tool, dump_debug_info)
+
+ def _CreateTestRunnerScript(self, options):
+ command_line_file = tempfile.NamedTemporaryFile()
+ # GTest expects argv[0] to be the executable path.
+ command_line_file.write(self.test_suite_basename + ' ' + options)
+ command_line_file.flush()
+ self.adb.PushIfNeeded(command_line_file.name,
+ constants.TEST_EXECUTABLE_DIR +
+ '/chrome-native-tests-command-line')
+
+ def _GetGTestReturnCode(self):
+ return None
+
+ def _GetFifo(self):
+ # The test.fifo path is determined by:
+ # testing/android/java/src/org/chromium/native_test/
+ # ChromeNativeTestActivity.java and
+ # testing/android/native_test_launcher.cc
+ return '/data/data/org.chromium.native_test/files/test.fifo'
+
+ def _ClearFifo(self):
+ self.adb.RunShellCommand('rm -f ' + self._GetFifo())
+
+ def _WatchFifo(self, timeout, logfile=None):
+ for i in range(10):
+ if self.adb.FileExistsOnDevice(self._GetFifo()):
+ print 'Fifo created...'
+ break
+ time.sleep(i)
+ else:
+ raise Exception('Unable to find fifo on device %s ' % self._GetFifo())
+ args = shlex.split(self.adb.Adb()._target_arg)
+ args += ['shell', 'cat', self._GetFifo()]
+ return pexpect.spawn('adb', args, timeout=timeout, logfile=logfile)
+
+ def GetAllTests(self):
+ """Returns a list of all tests available in the test suite."""
+ self._CreateTestRunnerScript('--gtest_list_tests')
+ try:
+ self.tool.SetupEnvironment()
+ # Clear and start monitoring logcat.
+ self._ClearFifo()
+ self.adb.RunShellCommand(
+ 'am start -n '
+ 'org.chromium.native_test/'
+ 'org.chromium.native_test.ChromeNativeTestActivity')
+ # Wait for native test to complete.
+ p = self._WatchFifo(timeout=30 * self.tool.GetTimeoutScale())
+ p.expect("<<ScopedMainEntryLogger")
+ p.close()
+ finally:
+ self.tool.CleanUpEnvironment()
+ # We need to strip the trailing newline.
+ content = [line.rstrip() for line in p.before.splitlines()]
+ ret = self._ParseGTestListTests(content)
+ return ret
+
+ def CreateTestRunnerScript(self, gtest_filter, test_arguments):
+ self._CreateTestRunnerScript('--gtest_filter=%s %s' % (gtest_filter,
+ test_arguments))
+
+ def RunTestsAndListResults(self):
+ try:
+ self.tool.SetupEnvironment()
+ self._ClearFifo()
+ self.adb.RunShellCommand(
+ 'am start -n '
+ 'org.chromium.native_test/'
+ 'org.chromium.native_test.ChromeNativeTestActivity')
+ finally:
+ self.tool.CleanUpEnvironment()
+ logfile = android_commands.NewLineNormalizer(sys.stdout)
+ return self._WatchTestOutput(self._WatchFifo(timeout=10, logfile=logfile))
+
+ def StripAndCopyExecutable(self):
+ # Always uninstall the previous one (by activity name); we don't
+ # know what was embedded in it.
+ self.adb.ManagedInstall(self.test_suite_full, False,
+ package_name='org.chromium.native_test')
+
+ def _GetTestSuiteBaseName(self):
+ """Returns the base name of the test suite."""
+ # APK test suite names end with '-debug.apk'
+ return os.path.basename(self.test_suite).rsplit('-debug', 1)[0]
diff --git a/media/webrtc/trunk/build/android/pylib/test_package_executable.py b/media/webrtc/trunk/build/android/pylib/test_package_executable.py
new file mode 100644
index 000000000..a11c768fb
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/test_package_executable.py
@@ -0,0 +1,167 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import os
+import shutil
+import sys
+import tempfile
+
+import cmd_helper
+import constants
+from test_package import TestPackage
+from pylib import pexpect
+
+
+class TestPackageExecutable(TestPackage):
+ """A helper class for running stand-alone executables."""
+
+ _TEST_RUNNER_RET_VAL_FILE = 'gtest_retval'
+
+ def __init__(self, adb, device, test_suite, timeout, rebaseline,
+ performance_test, cleanup_test_files, tool, dump_debug_info,
+ symbols_dir=None):
+ """
+ Args:
+ adb: ADB interface the tests are using.
+ device: Device to run the tests.
+ test_suite: A specific test suite to run, empty to run all.
+ timeout: Timeout for each test.
+ rebaseline: Whether or not to run tests in isolation and update the
+ filter.
+      performance_test: Whether or not to run performance test(s).
+ cleanup_test_files: Whether or not to cleanup test files on device.
+ tool: Name of the Valgrind tool.
+ dump_debug_info: A debug_info object.
+ symbols_dir: Directory to put the stripped binaries.
+ """
+ TestPackage.__init__(self, adb, device, test_suite, timeout,
+ rebaseline, performance_test, cleanup_test_files,
+ tool, dump_debug_info)
+ self.symbols_dir = symbols_dir
+
+ def _GetGTestReturnCode(self):
+ ret = None
+ ret_code = 1 # Assume failure if we can't find it
+ ret_code_file = tempfile.NamedTemporaryFile()
+ try:
+ if not self.adb.Adb().Pull(
+ self.adb.GetExternalStorage() + '/' +
+ TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE,
+ ret_code_file.name):
+ logging.critical('Unable to pull gtest ret val file %s',
+ ret_code_file.name)
+ raise ValueError
+ ret_code = file(ret_code_file.name).read()
+ ret = int(ret_code)
+ except ValueError:
+ logging.critical('Error reading gtest ret val file %s [%s]',
+ ret_code_file.name, ret_code)
+ ret = 1
+ return ret
+
+ def _AddNativeCoverageExports(self):
+ # export GCOV_PREFIX set the path for native coverage results
+ # export GCOV_PREFIX_STRIP indicates how many initial directory
+ # names to strip off the hardwired absolute paths.
+ # This value is calculated in buildbot.sh and
+ # depends on where the tree is built.
+ # Ex: /usr/local/google/code/chrome will become
+ # /code/chrome if GCOV_PREFIX_STRIP=3
+ try:
+ depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+ except KeyError:
+ logging.info('NATIVE_COVERAGE_DEPTH_STRIP is not defined: '
+ 'No native coverage.')
+ return ''
+ export_string = ('export GCOV_PREFIX="%s/gcov"\n' %
+ self.adb.GetExternalStorage())
+ export_string += 'export GCOV_PREFIX_STRIP=%s\n' % depth
+ return export_string
+
+ def GetAllTests(self):
+ """Returns a list of all tests available in the test suite."""
+ all_tests = self.adb.RunShellCommand(
+ '%s %s/%s --gtest_list_tests' %
+ (self.tool.GetTestWrapper(),
+ constants.TEST_EXECUTABLE_DIR,
+ self.test_suite_basename))
+ return self._ParseGTestListTests(all_tests)
+
+ def CreateTestRunnerScript(self, gtest_filter, test_arguments):
+ """Creates a test runner script and pushes to the device.
+
+ Args:
+ gtest_filter: A gtest_filter flag.
+ test_arguments: Additional arguments to pass to the test binary.
+ """
+ tool_wrapper = self.tool.GetTestWrapper()
+ sh_script_file = tempfile.NamedTemporaryFile()
+ # We need to capture the exit status from the script since adb shell won't
+ # propagate to us.
+ sh_script_file.write('cd %s\n'
+ '%s'
+ '%s %s/%s --gtest_filter=%s %s\n'
+ 'echo $? > %s' %
+ (constants.TEST_EXECUTABLE_DIR,
+ self._AddNativeCoverageExports(),
+ tool_wrapper, constants.TEST_EXECUTABLE_DIR,
+ self.test_suite_basename,
+ gtest_filter, test_arguments,
+ TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE))
+ sh_script_file.flush()
+ cmd_helper.RunCmd(['chmod', '+x', sh_script_file.name])
+ self.adb.PushIfNeeded(
+ sh_script_file.name,
+ constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh')
+    logging.info('Contents of the test runner script: ')
+ for line in open(sh_script_file.name).readlines():
+ logging.info(' ' + line.rstrip())
+
+ def RunTestsAndListResults(self):
+ """Runs all the tests and checks for failures.
+
+ Returns:
+ A TestResults object.
+ """
+ args = ['adb', '-s', self.device, 'shell', 'sh',
+ constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh']
+ logging.info(args)
+ p = pexpect.spawn(args[0], args[1:], logfile=sys.stdout)
+ return self._WatchTestOutput(p)
+
+ def StripAndCopyExecutable(self):
+ """Strips and copies the executable to the device."""
+ if self.tool.NeedsDebugInfo():
+ target_name = self.test_suite
+ else:
+ target_name = self.test_suite + '_' + self.device + '_stripped'
+ should_strip = True
+ if os.path.isfile(target_name):
+ logging.info('Found target file %s' % target_name)
+ target_mtime = os.stat(target_name).st_mtime
+ source_mtime = os.stat(self.test_suite).st_mtime
+ if target_mtime > source_mtime:
+ logging.info('Target mtime (%d) is newer than source (%d), assuming '
+ 'no change.' % (target_mtime, source_mtime))
+ should_strip = False
+
+ if should_strip:
+ logging.info('Did not find up-to-date stripped binary. Generating a '
+ 'new one (%s).' % target_name)
+ # Whenever we generate a stripped binary, copy to the symbols dir. If we
+ # aren't stripping a new binary, assume it's there.
+ if self.symbols_dir:
+ if not os.path.exists(self.symbols_dir):
+ os.makedirs(self.symbols_dir)
+ shutil.copy(self.test_suite, self.symbols_dir)
+ strip = os.environ['STRIP']
+ cmd_helper.RunCmd([strip, self.test_suite, '-o', target_name])
+ test_binary = constants.TEST_EXECUTABLE_DIR + '/' + self.test_suite_basename
+ self.adb.PushIfNeeded(target_name, test_binary)
+
+ def _GetTestSuiteBaseName(self):
+ """Returns the base name of the test suite."""
+ return os.path.basename(self.test_suite)
diff --git a/media/webrtc/trunk/build/android/pylib/test_result.py b/media/webrtc/trunk/build/android/pylib/test_result.py
new file mode 100644
index 000000000..31a546aca
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/test_result.py
@@ -0,0 +1,193 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import json
+import logging
+import os
+import time
+import traceback
+
+import buildbot_report
+import constants
+
+
+class BaseTestResult(object):
+ """A single result from a unit test."""
+
+ def __init__(self, name, log):
+ self.name = name
+ self.log = log.replace('\r', '')
+
+
+class SingleTestResult(BaseTestResult):
+ """Result information for a single test.
+
+ Args:
+ full_name: Full name of the test.
+ start_date: Date in milliseconds when the test began running.
+ dur: Duration of the test run in milliseconds.
+ log: An optional string listing any errors.
+ """
+
+ def __init__(self, full_name, start_date, dur, log=''):
+ BaseTestResult.__init__(self, full_name, log)
+ name_pieces = full_name.rsplit('#')
+ if len(name_pieces) > 1:
+ self.test_name = name_pieces[1]
+ self.class_name = name_pieces[0]
+ else:
+ self.class_name = full_name
+ self.test_name = full_name
+ self.start_date = start_date
+ self.dur = dur
+
+
+class TestResults(object):
+ """Results of a test run."""
+
+ def __init__(self):
+ self.ok = []
+ self.failed = []
+ self.crashed = []
+ self.unknown = []
+ self.timed_out = False
+ self.overall_fail = False
+
+ @staticmethod
+ def FromRun(ok=None, failed=None, crashed=None, timed_out=False,
+ overall_fail=False):
+ ret = TestResults()
+ ret.ok = ok or []
+ ret.failed = failed or []
+ ret.crashed = crashed or []
+ ret.timed_out = timed_out
+ ret.overall_fail = overall_fail
+ return ret
+
+ @staticmethod
+ def FromTestResults(results):
+ """Combines a list of results in a single TestResults object."""
+ ret = TestResults()
+ for t in results:
+ ret.ok += t.ok
+ ret.failed += t.failed
+ ret.crashed += t.crashed
+ ret.unknown += t.unknown
+ if t.timed_out:
+ ret.timed_out = True
+ if t.overall_fail:
+ ret.overall_fail = True
+ return ret
+
+ @staticmethod
+ def FromPythonException(test_name, start_date_ms, exc_info):
+ """Constructs a TestResults with exception information for the given test.
+
+ Args:
+ test_name: name of the test which raised an exception.
+ start_date_ms: the starting time for the test.
+ exc_info: exception info, ostensibly from sys.exc_info().
+
+ Returns:
+ A TestResults object with a SingleTestResult in the failed list.
+ """
+ exc_type, exc_value, exc_traceback = exc_info
+ trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+ exc_traceback))
+ log_msg = 'Exception:\n' + trace_info
+ duration_ms = (int(time.time()) * 1000) - start_date_ms
+
+ exc_result = SingleTestResult(
+ full_name='PythonWrapper#' + test_name,
+ start_date=start_date_ms,
+ dur=duration_ms,
+ log=(str(exc_type) + ' ' + log_msg))
+
+ results = TestResults()
+ results.failed.append(exc_result)
+ return results
+
+ def _Log(self, sorted_list):
+ for t in sorted_list:
+ logging.critical(t.name)
+ if t.log:
+ logging.critical(t.log)
+
+ def GetAllBroken(self):
+    """Returns all broken tests, including failed, crashed and unknown."""
+ return self.failed + self.crashed + self.unknown
+
+ def LogFull(self, test_group, test_suite, build_type):
+ """Output broken test logs, summarize in a log file and the test output."""
+ # Output all broken tests or 'passed' if none broken.
+ logging.critical('*' * 80)
+ logging.critical('Final result')
+ if self.failed:
+ logging.critical('Failed:')
+ self._Log(sorted(self.failed))
+ if self.crashed:
+ logging.critical('Crashed:')
+ self._Log(sorted(self.crashed))
+ if self.unknown:
+ logging.critical('Unknown:')
+ self._Log(sorted(self.unknown))
+ if not self.GetAllBroken():
+ logging.critical('Passed')
+ logging.critical('*' * 80)
+
+ # Summarize in a log file, if tests are running on bots.
+ if test_group and test_suite and os.environ.get('BUILDBOT_BUILDERNAME'):
+ log_file_path = os.path.join(constants.CHROME_DIR, 'out',
+ build_type, 'test_logs')
+ if not os.path.exists(log_file_path):
+ os.mkdir(log_file_path)
+ full_file_name = os.path.join(log_file_path, test_group)
+ if not os.path.exists(full_file_name):
+ with open(full_file_name, 'w') as log_file:
+ print >> log_file, '\n%s results for %s build %s:' % (
+ test_group, os.environ.get('BUILDBOT_BUILDERNAME'),
+ os.environ.get('BUILDBOT_BUILDNUMBER'))
+ log_contents = [' %s result : %d tests ran' % (test_suite,
+ len(self.ok) +
+ len(self.failed) +
+ len(self.crashed) +
+ len(self.unknown))]
+ content_pairs = [('passed', len(self.ok)), ('failed', len(self.failed)),
+ ('crashed', len(self.crashed))]
+ for (result, count) in content_pairs:
+ if count:
+ log_contents.append(', %d tests %s' % (count, result))
+ with open(full_file_name, 'a') as log_file:
+ print >> log_file, ''.join(log_contents)
+ content = {'test_group': test_group,
+ 'ok': [t.name for t in self.ok],
+ 'failed': [t.name for t in self.failed],
+                 'crashed': [t.name for t in self.crashed],
+ 'unknown': [t.name for t in self.unknown],}
+ with open(os.path.join(log_file_path, 'results.json'), 'a') as json_file:
+ print >> json_file, json.dumps(content)
+
+ # Summarize in the test output.
+ summary_string = 'Summary:\n'
+ summary_string += 'RAN=%d\n' % (len(self.ok) + len(self.failed) +
+ len(self.crashed) + len(self.unknown))
+ summary_string += 'PASSED=%d\n' % (len(self.ok))
+ summary_string += 'FAILED=%d %s\n' % (len(self.failed),
+ [t.name for t in self.failed])
+ summary_string += 'CRASHED=%d %s\n' % (len(self.crashed),
+ [t.name for t in self.crashed])
+ summary_string += 'UNKNOWN=%d %s\n' % (len(self.unknown),
+ [t.name for t in self.unknown])
+ logging.critical(summary_string)
+ return summary_string
+
+ def PrintAnnotation(self):
+ """Print buildbot annotations for test results."""
+ if self.timed_out:
+ buildbot_report.PrintWarning()
+ elif self.failed or self.crashed or self.overall_fail:
+ buildbot_report.PrintError()
+ else:
+ print 'Step success!' # No annotation needed
diff --git a/media/webrtc/trunk/build/android/pylib/tests_annotations.py b/media/webrtc/trunk/build/android/pylib/tests_annotations.py
new file mode 100644
index 000000000..f2a183466
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/tests_annotations.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Annotations for python-driven tests."""
+
+import os
+
+
+class AnnotatedFunctions(object):
+ """A container for annotated methods."""
+ _ANNOTATED = {}
+
+ @staticmethod
+ def _AddFunction(annotation, function):
+    """Adds an annotated function to our container.
+
+ Args:
+ annotation: the annotation string.
+ function: the function.
+ Returns:
+ The function passed in.
+ """
+ module_name = os.path.splitext(os.path.basename(
+ function.__globals__['__file__']))[0]
+ qualified_function_name = '.'.join([module_name, function.func_name])
+ function_list = AnnotatedFunctions._ANNOTATED.get(annotation, [])
+ function_list.append(qualified_function_name)
+ AnnotatedFunctions._ANNOTATED[annotation] = function_list
+ return function
+
+ @staticmethod
+ def IsAnnotated(annotation, qualified_function_name):
+ """True if function name (module.function) contains the annotation.
+
+ Args:
+ annotation: the annotation string.
+ qualified_function_name: the qualified function name.
+ Returns:
+ True if module.function contains the annotation.
+ """
+ return qualified_function_name in AnnotatedFunctions._ANNOTATED.get(
+ annotation, [])
+
+ @staticmethod
+ def GetTestAnnotations(qualified_function_name):
+ """Returns a list containing all annotations for the given function.
+
+ Args:
+ qualified_function_name: the qualified function name.
+ Returns:
+ List of all annotations for this function.
+ """
+ return [annotation
+ for annotation, tests in AnnotatedFunctions._ANNOTATED.iteritems()
+ if qualified_function_name in tests]
+
+
+# The following functions are annotations used for the python driven tests.
+def Smoke(function):
+ return AnnotatedFunctions._AddFunction('Smoke', function)
+
+
+def SmallTest(function):
+ return AnnotatedFunctions._AddFunction('SmallTest', function)
+
+
+def MediumTest(function):
+ return AnnotatedFunctions._AddFunction('MediumTest', function)
+
+
+def LargeTest(function):
+ return AnnotatedFunctions._AddFunction('LargeTest', function)
+
+
+def FlakyTest(function):
+ return AnnotatedFunctions._AddFunction('FlakyTest', function)
+
+
+def DisabledTest(function):
+ return AnnotatedFunctions._AddFunction('DisabledTest', function)
+
+
+def Feature(feature_list):
+ def _AddFeatures(function):
+ for feature in feature_list:
+ AnnotatedFunctions._AddFunction('Feature' + feature, function)
+ return AnnotatedFunctions._AddFunction('Feature', function)
+ return _AddFeatures
diff --git a/media/webrtc/trunk/build/android/pylib/valgrind_tools.py b/media/webrtc/trunk/build/android/pylib/valgrind_tools.py
new file mode 100644
index 000000000..810f6be18
--- /dev/null
+++ b/media/webrtc/trunk/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,255 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Classes in this file define additional actions that need to be taken to run a
+test under some kind of runtime error detection tool.
+
+The interface is intended to be used as follows.
+
+1. For tests that simply run a native process (i.e. no activity is spawned):
+
+Call tool.CopyFiles().
+Prepend test command line with tool.GetTestWrapper().
+
+2. For tests that spawn an activity:
+
+Call tool.CopyFiles().
+Call tool.SetupEnvironment().
+Run the test as usual.
+Call tool.CleanUpEnvironment().
+"""
+
+import os.path
+import sys
+
+from constants import CHROME_DIR
+
+
+def SetChromeTimeoutScale(adb, scale):
+ """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+ path = '/data/local/tmp/chrome_timeout_scale'
+ if not scale or scale == 1.0:
+ # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+ adb.RunShellCommand('rm %s' % path)
+ else:
+ adb.SetFileContents(path, '%f' % scale)
+
+
+class BaseTool(object):
+ """A tool that does nothing."""
+
+ def GetTestWrapper(self):
+ """Returns a string that is to be prepended to the test command line."""
+ return ''
+
+ def GetUtilWrapper(self):
+ """Returns the wrapper name for the utilities.
+
+ Returns:
+ A string that is to be prepended to the command line of utility
+ processes (forwarder, etc.).
+ """
+ return ''
+
+ def CopyFiles(self):
+ """Copies tool-specific files to the device, create directories, etc."""
+ pass
+
+ def SetupEnvironment(self):
+ """Sets up the system environment for a test.
+
+ This is a good place to set system properties.
+ """
+ pass
+
+ def CleanUpEnvironment(self):
+ """Cleans up environment."""
+ pass
+
+ def GetTimeoutScale(self):
+ """Returns a multiplier that should be applied to timeout values."""
+ return 1.0
+
+ def NeedsDebugInfo(self):
+ """Whether this tool requires debug info.
+
+ Returns:
+ True if this tool can not work with stripped binaries.
+ """
+ return False
+
+
+class AddressSanitizerTool(BaseTool):
+ """AddressSanitizer tool."""
+
+ WRAPPER_PATH = '/system/bin/asanwrapper'
+
+ def __init__(self, adb):
+ self._adb = adb
+ self._wrap_properties = ['wrap.com.google.android.apps.ch',
+ 'wrap.org.chromium.native_test']
+
+ def CopyFiles(self):
+ """Copies ASan tools to the device."""
+ files = ['system/lib/libasan_preload.so',
+ 'system/bin/asanwrapper',
+ 'system/bin/asan/app_process',
+ 'system/bin/linker']
+ android_product_out = os.environ['ANDROID_PRODUCT_OUT']
+ self._adb.MakeSystemFolderWritable()
+ for f in files:
+ self._adb.PushIfNeeded(os.path.join(android_product_out, f),
+ os.path.join('/', f))
+
+ def GetTestWrapper(self):
+ return AddressSanitizerTool.WRAPPER_PATH
+
+ def GetUtilWrapper(self):
+ """Returns the wrapper for utilities, such as forwarder.
+
+ AddressSanitizer wrapper must be added to all instrumented binaries,
+ including forwarder and the like. This can be removed if such binaries
+ were built without instrumentation. """
+ return AddressSanitizerTool.WRAPPER_PATH
+
+ def SetupEnvironment(self):
+ for prop in self._wrap_properties:
+ self._adb.RunShellCommand('setprop %s "logwrapper %s"' % (
+ prop, self.GetTestWrapper()))
+ SetChromeTimeoutScale(self._adb, self.GetTimeoutScale())
+
+ def CleanUpEnvironment(self):
+ for prop in self._wrap_properties:
+ self._adb.RunShellCommand('setprop %s ""' % (prop,))
+ SetChromeTimeoutScale(self._adb, None)
+
+ def GetTimeoutScale(self):
+ # Very slow startup.
+ return 20.0
+
+
+class ValgrindTool(BaseTool):
+ """Base abstract class for Valgrind tools."""
+
+ VG_DIR = '/data/local/tmp/valgrind'
+ VGLOGS_DIR = '/data/local/tmp/vglogs'
+
+ def __init__(self, adb):
+ self._adb = adb
+ # exactly 31 chars, SystemProperties::PROP_NAME_MAX
+ self._wrap_properties = ['wrap.com.google.android.apps.ch',
+ 'wrap.org.chromium.native_test']
+
+ def CopyFiles(self):
+ """Copies Valgrind tools to the device."""
+ self._adb.RunShellCommand('rm -r %s; mkdir %s' %
+ (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR))
+ self._adb.RunShellCommand('rm -r %s; mkdir %s' %
+ (ValgrindTool.VGLOGS_DIR,
+ ValgrindTool.VGLOGS_DIR))
+ files = self.GetFilesForTool()
+ for f in files:
+ self._adb.PushIfNeeded(os.path.join(CHROME_DIR, f),
+ os.path.join(ValgrindTool.VG_DIR,
+ os.path.basename(f)))
+
+ def SetupEnvironment(self):
+ """Sets up device environment."""
+ self._adb.RunShellCommand('chmod 777 /data/local/tmp')
+ for prop in self._wrap_properties:
+ self._adb.RunShellCommand('setprop %s "logwrapper %s"' % (
+ prop, self.GetTestWrapper()))
+ SetChromeTimeoutScale(self._adb, self.GetTimeoutScale())
+
+ def CleanUpEnvironment(self):
+ """Cleans up device environment."""
+ for prop in self._wrap_properties:
+ self._adb.RunShellCommand('setprop %s ""' % (prop,))
+ SetChromeTimeoutScale(self._adb, None)
+
+ def GetFilesForTool(self):
+ """Returns a list of file names for the tool."""
+ raise NotImplementedError()
+
+ def NeedsDebugInfo(self):
+ """Whether this tool requires debug info.
+
+ Returns:
+ True if this tool can not work with stripped binaries.
+ """
+ return True
+
+
+class MemcheckTool(ValgrindTool):
+ """Memcheck tool."""
+
+ def __init__(self, adb):
+ super(MemcheckTool, self).__init__(adb)
+
+ def GetFilesForTool(self):
+ """Returns a list of file names for the tool."""
+ return ['tools/valgrind/android/vg-chrome-wrapper.sh',
+ 'tools/valgrind/memcheck/suppressions.txt',
+ 'tools/valgrind/memcheck/suppressions_android.txt']
+
+ def GetTestWrapper(self):
+ """Returns a string that is to be prepended to the test command line."""
+ return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper.sh'
+
+ def GetTimeoutScale(self):
+ """Returns a multiplier that should be applied to timeout values."""
+ return 30
+
+
+class TSanTool(ValgrindTool):
+ """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""
+
+ def __init__(self, adb):
+ super(TSanTool, self).__init__(adb)
+
+ def GetFilesForTool(self):
+ """Returns a list of file names for the tool."""
+ return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
+ 'tools/valgrind/tsan/suppressions.txt',
+ 'tools/valgrind/tsan/suppressions_android.txt',
+ 'tools/valgrind/tsan/ignores.txt']
+
+ def GetTestWrapper(self):
+ """Returns a string that is to be prepended to the test command line."""
+ return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper-tsan.sh'
+
+ def GetTimeoutScale(self):
+ """Returns a multiplier that should be applied to timeout values."""
+ return 30.0
+
+
+TOOL_REGISTRY = {
+ 'memcheck': lambda x: MemcheckTool(x),
+ 'memcheck-renderer': lambda x: MemcheckTool(x),
+ 'tsan': lambda x: TSanTool(x),
+ 'tsan-renderer': lambda x: TSanTool(x),
+ 'asan': lambda x: AddressSanitizerTool(x),
+}
+
+
+def CreateTool(tool_name, adb):
+ """Creates a tool with the specified tool name.
+
+ Args:
+ tool_name: Name of the tool to create.
+ adb: ADB interface the tool will use.
+ Returns:
+ A tool for the specified tool_name.
+ """
+ if not tool_name:
+ return BaseTool()
+
+ ctor = TOOL_REGISTRY.get(tool_name)
+ if ctor:
+ return ctor(adb)
+ else:
+ print 'Unknown tool %s, available tools: %s' % (
+ tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+ sys.exit(1)
diff --git a/media/webrtc/trunk/build/android/run_instrumentation_tests.py b/media/webrtc/trunk/build/android/run_instrumentation_tests.py
new file mode 100755
index 000000000..37910c964
--- /dev/null
+++ b/media/webrtc/trunk/build/android/run_instrumentation_tests.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs both the Python and Java tests."""
+
+import optparse
+import sys
+import time
+
+from pylib import apk_info
+from pylib import buildbot_report
+from pylib import ports
+from pylib import run_java_tests
+from pylib import run_python_tests
+from pylib import run_tests_helper
+from pylib import test_options_parser
+from pylib.test_result import TestResults
+
+
+def SummarizeResults(java_results, python_results, annotation, build_type):
+ """Summarize the results from the various test types.
+
+ Args:
+ java_results: a TestResults object with java test case results.
+ python_results: a TestResults object with python test case results.
+ annotation: the annotation used for these results.
+ build_type: 'Release' or 'Debug'.
+
+ Returns:
+ A tuple (all_results, summary_string, num_failing)
+ """
+ all_results = TestResults.FromTestResults([java_results, python_results])
+ summary_string = all_results.LogFull('Instrumentation', annotation,
+ build_type)
+ num_failing = (len(all_results.failed) + len(all_results.crashed) +
+ len(all_results.unknown))
+ return all_results, summary_string, num_failing
+
+
+def DispatchInstrumentationTests(options):
+ """Dispatches the Java and Python instrumentation tests, sharding if possible.
+
+ Uses the logging module to print the combined final results and
+ summary of the Java and Python tests. If the java_only option is set, only
+ the Java tests run. If the python_only option is set, only the python tests
+ run. If neither are set, run both Java and Python tests.
+
+ Args:
+ options: command-line options for running the Java and Python tests.
+
+ Returns:
+ An integer representing the number of failing tests.
+ """
+ # Reset the test port allocation. It's important to do it before starting
+ # to dispatch any tests.
+ if not ports.ResetTestServerPortAllocation():
+ raise Exception('Failed to reset test server port.')
+ start_date = int(time.time() * 1000)
+ java_results = TestResults()
+ python_results = TestResults()
+
+ if options.run_java_tests:
+ java_results = run_java_tests.DispatchJavaTests(
+ options,
+ [apk_info.ApkInfo(options.test_apk_path, options.test_apk_jar_path)])
+ if options.run_python_tests:
+ python_results = run_python_tests.DispatchPythonTests(options)
+
+ all_results, summary_string, num_failing = SummarizeResults(
+ java_results, python_results, options.annotation, options.build_type)
+ return num_failing
+
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ test_options_parser.AddInstrumentationOptions(option_parser)
+ options, args = option_parser.parse_args(argv)
+ test_options_parser.ValidateInstrumentationOptions(option_parser, options,
+ args)
+
+ run_tests_helper.SetLogLevel(options.verbose_count)
+ buildbot_report.PrintNamedStep(
+ 'Instrumentation tests: %s - %s' % (', '.join(options.annotation),
+ options.test_apk))
+ return DispatchInstrumentationTests(options)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/android/run_tests.py b/media/webrtc/trunk/build/android/run_tests.py
new file mode 100755
index 000000000..8a8bd2695
--- /dev/null
+++ b/media/webrtc/trunk/build/android/run_tests.py
@@ -0,0 +1,450 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all the native unit tests.
+
+1. Copy over test binary to /data/local on device.
+2. Resources: chrome/unit_tests requires resources (chrome.pak and en-US.pak)
+ to be deployed to the device. We use the device's $EXTERNAL_STORAGE as the
+ base dir (which maps to Context.getExternalFilesDir()).
+3. Environment:
+3.1. chrome/unit_tests requires (via chrome_paths.cc) a directory named:
+ $EXTERNAL_STORAGE + /chrome/test/data
+3.2. page_cycler_tests have following requirements,
+3.2.1 the following data on host:
+ <chrome_src_dir>/tools/page_cycler
+ <chrome_src_dir>/data/page_cycler
+3.2.2. two data directories to store above test data on device named:
+ $EXTERNAL_STORAGE + /tools/ (for database perf test)
+ $EXTERNAL_STORAGE + /data/ (for other perf tests)
+3.2.3. a http server to serve http perf tests.
+ The http root is host's <chrome_src_dir>/data/page_cycler/, port 8000.
+3.2.4 a tool named forwarder is also required to run on device to
+ forward the http request/response between host and device.
+3.2.5 Chrome is installed on device.
+4. Run the binary in the device and stream the log to the host.
+4.1. Optionally, filter specific tests.
+4.2. Optionally, rebaseline: run the available tests and update the
+ suppressions file for failures.
+4.3. If we're running a single test suite and we have multiple devices
+ connected, we'll shard the tests.
+5. Clean up the device.
+
+Suppressions:
+
+Individual tests in a test binary can be suppressed by listing it in
+the gtest_filter directory in a file of the same name as the test binary,
+one test per line. Here is an example:
+
+ $ cat gtest_filter/base_unittests_disabled
+ DataPackTest.Load
+ ReadOnlyFileUtilTest.ContentsEqual
+
+This file is generated by the tests running on devices. If running on emulator,
+an additional filter file, which lists the tests that only failed in the
+emulator, will be loaded. We don't care about the rare test cases which
+succeeded on the emulator but failed on a device.
+"""
+
+import fnmatch
+import logging
+import optparse
+import os
+import signal
+import subprocess
+import sys
+import time
+
+from pylib import android_commands
+from pylib.base_test_sharder import BaseTestSharder
+from pylib import buildbot_report
+from pylib import constants
+from pylib import debug_info
+import emulator
+from pylib import ports
+from pylib import run_tests_helper
+from pylib import test_options_parser
+from pylib.single_test_runner import SingleTestRunner
+from pylib.test_result import BaseTestResult, TestResults
+
+
+_TEST_SUITES = ['base_unittests',
+ 'content_unittests',
+ 'gpu_unittests',
+ 'ipc_tests',
+ 'media_unittests',
+ 'net_unittests',
+ 'sql_unittests',
+ 'sync_unit_tests',
+ 'ui_unittests',
+ 'unit_tests',
+ ]
+
+
+def TestSuiteDir(build_type):
+ """Return the base directory of test suites."""
+ return os.path.abspath(os.path.join(constants.CHROME_DIR, 'out', build_type))
+
+def FullyQualifiedTestSuites(exe, option_test_suite, build_type):
+ """Return a fully qualified list
+
+ Args:
+ exe: if True, use the executable-based test runner.
+ option_test_suite: the test_suite specified as an option.
+ build_type: 'Release' or 'Debug'.
+ """
+ test_suite_dir = TestSuiteDir(build_type)
+ if option_test_suite:
+ all_test_suites = [option_test_suite]
+ else:
+ all_test_suites = _TEST_SUITES
+
+ if exe:
+ qualified_test_suites = [os.path.join(test_suite_dir, t)
+ for t in all_test_suites]
+ else:
+ # out/(Debug|Release)/$SUITE_apk/$SUITE-debug.apk
+ qualified_test_suites = [os.path.join(test_suite_dir,
+ t + '_apk',
+ t + '-debug.apk')
+ for t in all_test_suites]
+ for t, q in zip(all_test_suites, qualified_test_suites):
+ if not os.path.exists(q):
+ logging.critical('Test suite %s not found in %s.\n'
+ 'Supported test suites:\n %s\n'
+ 'Ensure it has been built.\n',
+ t, q, _TEST_SUITES)
+ return []
+ return qualified_test_suites
+
+
+class TimeProfile(object):
+ """Class for simple profiling of action, with logging of cost."""
+
+ def __init__(self, description):
+ self._description = description
+ self.Start()
+
+ def Start(self):
+ self._starttime = time.time()
+
+ def Stop(self):
+ """Stop profiling and dump a log."""
+ if self._starttime:
+ stoptime = time.time()
+ logging.info('%fsec to perform %s',
+ stoptime - self._starttime, self._description)
+ self._starttime = None
+
+
+class Xvfb(object):
+ """Class to start and stop Xvfb if relevant. Nop if not Linux."""
+
+ def __init__(self):
+ self._pid = 0
+
+ def _IsLinux(self):
+ """Return True if on Linux; else False."""
+ return sys.platform.startswith('linux')
+
+ def Start(self):
+ """Start Xvfb and set an appropriate DISPLAY environment. Linux only.
+
+ Copied from tools/code_coverage/coverage_posix.py
+ """
+ if not self._IsLinux():
+ return
+ proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+ '-ac'],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ self._pid = proc.pid
+ if not self._pid:
+ raise Exception('Could not start Xvfb')
+ os.environ['DISPLAY'] = ':9'
+
+ # Now confirm, giving a chance for it to start if needed.
+ for _ in range(10):
+ proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+ _, retcode = os.waitpid(proc.pid, 0)
+ if retcode == 0:
+ break
+ time.sleep(0.25)
+ if retcode != 0:
+ raise Exception('Could not confirm Xvfb happiness')
+
+ def Stop(self):
+ """Stop Xvfb if needed. Linux only."""
+ if self._pid:
+ try:
+ os.kill(self._pid, signal.SIGKILL)
+ except:
+ pass
+ del os.environ['DISPLAY']
+ self._pid = 0
+
+
+class TestSharder(BaseTestSharder):
+ """Responsible for sharding the tests on the connected devices."""
+
+ def __init__(self, attached_devices, test_suite, gtest_filter,
+ test_arguments, timeout, rebaseline, performance_test,
+ cleanup_test_files, tool, log_dump_name, fast_and_loose,
+ build_type):
+ BaseTestSharder.__init__(self, attached_devices)
+ self.test_suite = test_suite
+ self.test_suite_basename = os.path.basename(test_suite)
+ self.gtest_filter = gtest_filter or ''
+ self.test_arguments = test_arguments
+ self.timeout = timeout
+ self.rebaseline = rebaseline
+ self.performance_test = performance_test
+ self.cleanup_test_files = cleanup_test_files
+ self.tool = tool
+ self.log_dump_name = log_dump_name
+ self.fast_and_loose = fast_and_loose
+ self.build_type = build_type
+ test = SingleTestRunner(self.attached_devices[0], test_suite, gtest_filter,
+ test_arguments, timeout, rebaseline,
+ performance_test, cleanup_test_files, tool, 0,
+ not not self.log_dump_name, fast_and_loose,
+ build_type)
+ self.tests = []
+ if not self.gtest_filter:
+ # No filter has been specified, let's add all tests then.
+ # The executable/apk needs to be copied before we can call GetAllTests.
+ test.test_package.StripAndCopyExecutable()
+ all_tests = test.test_package.GetAllTests()
+ if not rebaseline:
+ disabled_list = test.GetDisabledTests()
+ # Only includes tests that do not have any match in the disabled list.
+ all_tests = filter(lambda t:
+ not any([fnmatch.fnmatch(t, disabled_pattern)
+ for disabled_pattern in disabled_list]),
+ all_tests)
+ self.tests = all_tests
+
+ def CreateShardedTestRunner(self, device, index):
+ """Creates a suite-specific test runner.
+
+ Args:
+ device: Device serial where this shard will run.
+ index: Index of this device in the pool.
+
+ Returns:
+ A SingleTestRunner object.
+ """
+ device_num = len(self.attached_devices)
+ shard_size = (len(self.tests) + device_num - 1) / device_num
+ shard_test_list = self.tests[index * shard_size : (index + 1) * shard_size]
+ test_filter = ':'.join(shard_test_list) + self.gtest_filter
+ return SingleTestRunner(device, self.test_suite,
+ test_filter, self.test_arguments, self.timeout,
+ self.rebaseline, self.performance_test,
+ self.cleanup_test_files, self.tool, index,
+ not not self.log_dump_name, self.fast_and_loose,
+ self.build_type)
+
+ def OnTestsCompleted(self, test_runners, test_results):
+ """Notifies that we completed the tests."""
+ test_results.LogFull('Unit test', os.path.basename(self.test_suite),
+ self.build_type)
+ test_results.PrintAnnotation()
+ if test_results.failed and self.rebaseline:
+ test_runners[0].UpdateFilter(test_results.failed)
+ if self.log_dump_name:
+ # Zip all debug info outputs into a file named by log_dump_name.
+ debug_info.GTestDebugInfo.ZipAndCleanResults(
+ os.path.join(TestSuiteDir(self.build_type), 'debug_info_dumps'),
+ self.log_dump_name)
+
+
+def _RunATestSuite(options):
+ """Run a single test suite.
+
+ Helper for Dispatch() to allow stop/restart of the emulator across
+ test bundles. If using the emulator, we start it on entry and stop
+ it on exit.
+
+ Args:
+ options: options for running the tests.
+
+ Returns:
+ 0 if successful, number of failing tests otherwise.
+ """
+ step_name = os.path.basename(options.test_suite).replace('-debug.apk', '')
+ buildbot_report.PrintNamedStep(step_name)
+ attached_devices = []
+ buildbot_emulators = []
+
+ if options.use_emulator:
+ for n in range(options.emulator_count):
+ t = TimeProfile('Emulator launch %d' % n)
+ avd_name = None
+ if n > 0:
+ # Creates a temporary AVD for the extra emulators.
+ avd_name = 'run_tests_avd_%d' % n
+ buildbot_emulator = emulator.Emulator(avd_name, options.fast_and_loose)
+ buildbot_emulator.Launch(kill_all_emulators=n == 0)
+ t.Stop()
+ buildbot_emulators.append(buildbot_emulator)
+ attached_devices.append(buildbot_emulator.device)
+    # Wait for all emulators to finish booting.
+ map(lambda buildbot_emulator: buildbot_emulator.ConfirmLaunch(True),
+ buildbot_emulators)
+ elif options.test_device:
+ attached_devices = [options.test_device]
+ else:
+ attached_devices = android_commands.GetAttachedDevices()
+
+ if not attached_devices:
+ logging.critical('A device must be attached and online.')
+ buildbot_report.PrintError()
+ return 1
+
+ # Reset the test port allocation. It's important to do it before starting
+ # to dispatch any tests.
+ if not ports.ResetTestServerPortAllocation():
+ raise Exception('Failed to reset test server port.')
+
+ if options.performance_test or options.gtest_filter:
+    # These configurations can't be split across multiple devices.
+ attached_devices = [attached_devices[0]]
+ sharder = TestSharder(attached_devices, options.test_suite,
+ options.gtest_filter, options.test_arguments,
+ options.timeout, options.rebaseline,
+ options.performance_test,
+ options.cleanup_test_files, options.tool,
+ options.log_dump, options.fast_and_loose,
+ options.build_type)
+ test_results = sharder.RunShardedTests()
+
+ for buildbot_emulator in buildbot_emulators:
+ buildbot_emulator.Shutdown()
+
+  # Another chance if we timed out? At this point it is safe(r) to
+ # run fast and loose since we just uploaded all the test data and
+ # binary.
+ if test_results.timed_out and options.repeat:
+ logging.critical('Timed out; repeating in fast_and_loose mode.')
+ options.fast_and_loose = True
+ options.repeat -= 1
+ logging.critical('Repeats left: ' + str(options.repeat))
+ return _RunATestSuite(options)
+ return len(test_results.failed)
+
+
+def Dispatch(options):
+ """Dispatches the tests, sharding if possible.
+
+ If options.use_emulator is True, all tests will be run in new emulator
+ instance.
+
+ Args:
+ options: options for running the tests.
+
+ Returns:
+ 0 if successful, number of failing tests otherwise.
+ """
+ if options.test_suite == 'help':
+ ListTestSuites()
+ return 0
+
+ if options.use_xvfb:
+ xvfb = Xvfb()
+ xvfb.Start()
+
+ all_test_suites = FullyQualifiedTestSuites(options.exe, options.test_suite,
+ options.build_type)
+ failures = 0
+ for suite in all_test_suites:
+ options.test_suite = suite
+ failures += _RunATestSuite(options)
+
+ if options.use_xvfb:
+ xvfb.Stop()
+ return failures
+
+
+def ListTestSuites():
+ """Display a list of available test suites."""
+ print 'Available test suites are:'
+ for test_suite in _TEST_SUITES:
+ print test_suite
+
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ test_options_parser.AddTestRunnerOptions(option_parser, default_timeout=0)
+ option_parser.add_option('-s', '--suite', dest='test_suite',
+ help='Executable name of the test suite to run '
+ '(use -s help to list them)')
+ option_parser.add_option('-d', '--device', dest='test_device',
+ help='Target device the test suite to run ')
+ option_parser.add_option('-r', dest='rebaseline',
+ help='Rebaseline and update *testsuite_disabled',
+ action='store_true')
+ option_parser.add_option('-f', '--gtest_filter', dest='gtest_filter',
+ help='gtest filter')
+ option_parser.add_option('-a', '--test_arguments', dest='test_arguments',
+ help='Additional arguments to pass to the test')
+ option_parser.add_option('-p', dest='performance_test',
+ help='Indicator of performance test',
+ action='store_true')
+ option_parser.add_option('-L', dest='log_dump',
+ help='file name of log dump, which will be put in '
+ 'subfolder debug_info_dumps under the same '
+ 'directory in where the test_suite exists.')
+ option_parser.add_option('-e', '--emulator', dest='use_emulator',
+ action='store_true',
+ help='Run tests in a new instance of emulator')
+ option_parser.add_option('-n', '--emulator_count',
+ type='int', default=1,
+ help='Number of emulators to launch for running the '
+ 'tests.')
+ option_parser.add_option('-x', '--xvfb', dest='use_xvfb',
+ action='store_true',
+ help='Use Xvfb around tests (ignored if not Linux)')
+ option_parser.add_option('--fast', '--fast_and_loose', dest='fast_and_loose',
+ action='store_true',
+ help='Go faster (but be less stable), '
+ 'for quick testing. Example: when tracking down '
+ 'tests that hang to add to the disabled list, '
+ 'there is no need to redeploy the test binary '
+ 'or data to the device again. '
+ 'Don\'t use on bots by default!')
+ option_parser.add_option('--repeat', dest='repeat', type='int',
+ default=2,
+ help='Repeat count on test timeout')
+ option_parser.add_option('--exit_code', action='store_true',
+ help='If set, the exit code will be total number '
+ 'of failures.')
+ option_parser.add_option('--exe', action='store_true',
+ help='If set, use the exe test runner instead of '
+ 'the APK.')
+
+ options, args = option_parser.parse_args(argv)
+ if len(args) > 1:
+ print 'Unknown argument:', args[1:]
+ option_parser.print_usage()
+ sys.exit(1)
+ run_tests_helper.SetLogLevel(options.verbose_count)
+ emulator.DeleteAllTempAVDs()
+ failed_tests_count = Dispatch(options)
+
+ # Failures of individual test suites are communicated by printing a
+ # STEP_FAILURE message.
+ # Returning a success exit status also prevents the buildbot from incorrectly
+ # marking the last suite as failed if there were failures in other suites in
+ # the batch (this happens because the exit status is a sum of all failures
+ # from all suites, but the buildbot associates the exit status only with the
+ # most recent step).
+ if options.exit_code:
+ return failed_tests_count
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/apk_test.gypi b/media/webrtc/trunk/build/apk_test.gypi
new file mode 100644
index 000000000..eefcbd7fd
--- /dev/null
+++ b/media/webrtc/trunk/build/apk_test.gypi
@@ -0,0 +1,75 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK based test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'test_suite_name_apk',
+# 'type': 'none',
+# 'variables': {
+# 'test_suite_name': 'test_suite_name', # string
+# 'input_shlib_path' : '/path/to/test_suite.so', # string
+# 'input_jars_paths': ['/path/to/test_suite.jar', ... ], # list
+# },
+# 'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+ 'variables': {
+ 'input_jars_paths': [
+ # Needed by ChromeNativeTestActivity.java.
+ '<(PRODUCT_DIR)/lib.java/chromium_base.jar',
+ ],
+ },
+ 'target_conditions': [
+ ['_toolset == "target"', {
+ 'conditions': [
+ ['OS == "android" and gtest_target_type == "shared_library"', {
+ 'actions': [{
+ 'action_name': 'apk_<(test_suite_name)',
+ 'message': 'Building <(test_suite_name) test apk.',
+ 'inputs': [
+ '<(DEPTH)/testing/android/AndroidManifest.xml',
+ '<(DEPTH)/testing/android/generate_native_test.py',
+ '<(input_shlib_path)',
+ '>@(input_jars_paths)',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/<(test_suite_name)_apk/<(test_suite_name)-debug.apk',
+ ],
+ 'action': [
+ '<(DEPTH)/testing/android/generate_native_test.py',
+ '--native_library',
+ '<(input_shlib_path)',
+ '--jars',
+ '">@(input_jars_paths)"',
+ '--output',
+ '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+ '--strip-binary=<(android_strip)',
+ '--app_abi',
+ '<(android_app_abi)',
+ '--ant-args',
+ '-DPRODUCT_DIR=<(ant_build_out)',
+ '--ant-args',
+ '-DANDROID_SDK=<(android_sdk)',
+ '--ant-args',
+ '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+ '--ant-args',
+ '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+ '--ant-args',
+ '-DANDROID_SDK_VERSION=<(android_sdk_version)',
+ '--ant-args',
+ '-DANDROID_GDBSERVER=<(android_gdbserver)',
+ '--ant-args',
+ '-DCHROMIUM_SRC=<(ant_build_out)/../..',
+ ],
+ }],
+ }], # 'OS == "android" and gtest_target_type == "shared_library"
+ ], # conditions
+ }],
+ ], # target_conditions
+}
diff --git a/media/webrtc/trunk/build/apply_locales.py b/media/webrtc/trunk/build/apply_locales.py
new file mode 100755
index 000000000..6af7280fa
--- /dev/null
+++ b/media/webrtc/trunk/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
+
+import sys
+import optparse
+
+def main(argv):
+
+ parser = optparse.OptionParser()
+ usage = 'usage: %s [options ...] format_string locale_list'
+ parser.set_usage(usage.replace('%s', '%prog'))
+ parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+ default=False,
+ help='map "en-US" to "en" and "-" to "_" in locales')
+
+ (options, arglist) = parser.parse_args(argv)
+
+ if len(arglist) < 3:
+ print 'ERROR: need string and list of locales'
+ return 1
+
+ str_template = arglist[1]
+ locales = arglist[2:]
+
+ results = []
+ for locale in locales:
+ # For Cocoa to find the locale at runtime, it needs to use '_' instead
+ # of '-' (http://crbug.com/20441). Also, 'en-US' should be represented
+ # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+ if options.dash_to_underscore:
+ if locale == 'en-US':
+ locale = 'en'
+ locale = locale.replace('-', '_')
+ results.append(str_template.replace('ZZLOCALE', locale))
+
+ # Quote each element so filename spaces don't mess up GYP's attempt to parse
+ # it into a list.
+ print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/asan.saves b/media/webrtc/trunk/build/asan.saves
new file mode 100644
index 000000000..0c4e4ed08
--- /dev/null
+++ b/media/webrtc/trunk/build/asan.saves
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file lists symbols that should not be stripped by Xcode from the binaries
+# built for Mac OS X using AddressSanitizer
+# (http://dev.chromium.org/developers/testing/addresssanitizer).
+
+___asan_init
+___asan_handle_no_return
+___asan_register_global
+___asan_register_globals
+___asan_unregister_globals
+___asan_report_load1
+___asan_report_load2
+___asan_report_load4
+___asan_report_load8
+___asan_report_load16
+___asan_report_store1
+___asan_report_store2
+___asan_report_store4
+___asan_report_store8
+___asan_report_store16
diff --git a/media/webrtc/trunk/build/branding_value.sh b/media/webrtc/trunk/build/branding_value.sh
new file mode 100755
index 000000000..9fcb550ca
--- /dev/null
+++ b/media/webrtc/trunk/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files. Pass the
+# value of GYP's branding variable followed by the key you want and the right
+# file is checked.
+#
+# branding_value.sh Chromium COPYRIGHT
+# branding_value.sh Chromium PRODUCT_FULLNAME
+#
+
+set -e
+
+if [ $# -ne 2 ] ; then
+ echo "error: expect two arguments, branding and key" >&2
+ exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+pushd $(dirname "${0}") > /dev/null
+BUILD_DIR=$(pwd)
+popd > /dev/null
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+ Chromium)
+ BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+ ;;
+ Chrome)
+ BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+ ;;
+ *)
+ echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+ exit 1
+ ;;
+esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+ echo "error: failed to find key '${THE_KEY}'" >&2
+ exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/media/webrtc/trunk/build/build_config.h b/media/webrtc/trunk/build/build_config.h
new file mode 100644
index 000000000..55ed38c2c
--- /dev/null
+++ b/media/webrtc/trunk/build/build_config.h
@@ -0,0 +1,201 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+// Operating System:
+// OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX)
+// Compiler:
+// COMPILER_MSVC / COMPILER_GCC
+// Processor:
+// ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+// ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+#if defined(__APPLE__)
+#include <TargetConditionals.h>
+#endif
+
+// A set of macros to use for platform detection.
+#if defined(__APPLE__)
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__native_client__)
+#define OS_NACL 1
+#elif defined(__linux__)
+#define OS_LINUX 1
+// Use TOOLKIT_GTK on linux if TOOLKIT_VIEWS isn't defined.
+#if !defined(TOOLKIT_VIEWS)
+#define TOOLKIT_GTK
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__DragonFly__)
+#define OS_DRAGONFLY 1
+#define TOOLKIT_GTK
+#elif defined(__FreeBSD__) || defined(__FreeBSD_kernel__)
+#define OS_FREEBSD 1
+#define TOOLKIT_GTK
+#elif defined(__NetBSD__)
+#define OS_NETBSD 1
+#define TOOLKIT_GTK
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#define TOOLKIT_GTK
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#define TOOLKIT_GTK
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL) && defined(USE_NSS)
+#error Cannot use both OpenSSL and NSS
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_DRAGONFLY) || defined(OS_FREEBSD) \
+ || defined(OS_NETBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_BSD) || \
+ defined(OS_SOLARIS) || defined(OS_ANDROID) || defined(OS_NACL)
+#define OS_POSIX 1
+#endif
+
+#if defined(OS_POSIX) && !defined(OS_MACOSX) && !defined(OS_ANDROID) && \
+ !defined(OS_NACL)
+#define USE_X11 1 // Use X for graphics.
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX)) && !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection. For more info on what's defined, see:
+// http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+// http://www.agner.org/optimize/calling_conventions.pdf
+// or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__)
+#define ARCH_CPU_32_BITS 1
+#elif defined(__MIPSEL__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__powerpc64__)
+#define ARCH_CPU_PPC_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__ppc__) || defined(__powerpc__)
+#define ARCH_CPU_PPC_FAMILY 1
+#define ARCH_CPU_PPC 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__sparc64__)
+#define ARCH_CPU_SPARC_FAMILY 1
+#define ARCH_CPU_SPARC 1
+#define ARCH_CPU_64_BITS 1
+#elif defined(__sparc__)
+#define ARCH_CPU_SPARC_FAMILY 1
+#define ARCH_CPU_SPARC 1
+#define ARCH_CPU_32_BITS 1
+#elif defined(__mips__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS 1
+#define ARCH_CPU_32_BITS 1
+#elif defined(__hppa__)
+#define ARCH_CPU_HPPA 1
+#define ARCH_CPU_32_BITS 1
+#elif defined(__ia64__)
+#define ARCH_CPU_IA64 1
+#define ARCH_CPU_64_BITS 1
+#elif defined(__s390x__)
+#define ARCH_CPU_S390X 1
+#define ARCH_CPU_64_BITS 1
+#elif defined(__s390__)
+#define ARCH_CPU_S390 1
+#define ARCH_CPU_32_BITS 1
+#elif defined(__alpha__)
+#define ARCH_CPU_ALPHA 1
+#define ARCH_CPU_64_BITS 1
+#elif defined(__aarch64__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+ defined(__WCHAR_MAX__) && \
+ (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+ defined(__WCHAR_MAX__) && \
+ (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base who manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(__ARMEL__) && !defined(OS_IOS)
+#define WCHAR_T_IS_UNSIGNED 1
+#elif defined(__MIPSEL__)
+#define WCHAR_T_IS_UNSIGNED 0
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif // BUILD_BUILD_CONFIG_H_
diff --git a/media/webrtc/trunk/build/common.croc b/media/webrtc/trunk/build/common.croc
new file mode 100644
index 000000000..7281bb4eb
--- /dev/null
+++ b/media/webrtc/trunk/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+# croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+ # List of root directories, applied in order
+ 'roots' : [
+ # Sub-paths we specifically care about and want to call out
+ {
+ 'root' : '_/src',
+ 'altname' : 'CHROMIUM',
+ },
+ ],
+
+ # List of rules, applied in order
+ # Note that any 'include':0 rules here will be overridden by the 'include':1
+ # rules in the platform-specific configs.
+ 'rules' : [
+ # Don't scan for executable lines in uninstrumented C++ header files
+ {
+ 'regexp' : '.*\\.(h|hpp)$',
+ 'add_if_missing' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '',
+ 'group' : 'source',
+ },
+ {
+ 'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+ 'group' : 'test',
+ },
+
+ # Languages
+ {
+ 'regexp' : '.*\\.(c|h)$',
+ 'language' : 'C',
+ },
+ {
+ 'regexp' : '.*\\.(cc|cpp|hpp)$',
+ 'language' : 'C++',
+ },
+
+ # Files/paths to include. Specify these before the excludes, since rules
+ # are in order.
+ {
+ 'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+ 'include' : 1,
+ },
+ # Don't include subversion or mercurial SCM dirs
+ {
+ 'regexp' : '.*/(\\.svn|\\.hg)/',
+ 'include' : 0,
+ },
+ # Don't include output dirs
+ {
+ 'regexp' : '.*/(Debug|Release|sconsbuild|out|xcodebuild)/',
+ 'include' : 0,
+ },
+ # Don't include third-party source
+ {
+ 'regexp' : '.*/third_party/',
+ 'include' : 0,
+ },
+ # We don't run the V8 test suite, so we don't care about V8 coverage.
+ {
+ 'regexp' : '.*/v8/',
+ 'include' : 0,
+ },
+ ],
+
+ # Paths to add source from
+ 'add_files' : [
+ 'CHROMIUM'
+ ],
+
+ # Statistics to print
+ 'print_stats' : [
+ {
+ 'stat' : 'files_executable',
+ 'format' : '*RESULT FilesKnown: files_executable= %d files',
+ },
+ {
+ 'stat' : 'files_instrumented',
+ 'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+ },
+ {
+ 'stat' : '100.0 * files_instrumented / files_executable',
+ 'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g',
+ },
+ {
+ 'stat' : 'lines_executable',
+ 'format' : '*RESULT LinesKnown: lines_known= %d lines',
+ },
+ {
+ 'stat' : 'lines_instrumented',
+ 'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+ },
+ {
+ 'stat' : 'lines_covered',
+ 'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+ 'group' : 'source',
+ },
+ {
+ 'stat' : 'lines_covered',
+ 'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+ 'group' : 'test',
+ },
+ {
+ 'stat' : '100.0 * lines_covered / lines_executable',
+ 'format' : '*RESULT PercentCovered: percent_covered= %g',
+ },
+ {
+ 'stat' : '100.0 * lines_covered / lines_executable',
+ 'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g',
+ 'group' : 'source',
+ },
+ {
+ 'stat' : '100.0 * lines_covered / lines_executable',
+ 'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g',
+ 'group' : 'test',
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/common.gypi b/media/webrtc/trunk/build/common.gypi
new file mode 100644
index 000000000..5382cdf41
--- /dev/null
+++ b/media/webrtc/trunk/build/common.gypi
@@ -0,0 +1,3669 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# IMPORTANT:
+# Please don't directly include this file if you are building via gyp_chromium,
+# since gyp_chromium is automatically forcing its inclusion.
+{
+ # Variables expected to be overridden on the GYP command line (-D) or by
+ # ~/.gyp/include.gypi.
+ 'variables': {
+ # Putting a variables dict inside another variables dict looks kind of
+ # weird. This is done so that 'host_arch', 'chromeos', etc are defined as
+ # variables within the outer variables dict here. This is necessary
+ # to get these variables defined for the conditions within this variables
+ # dict that operate on these variables.
+ 'variables': {
+ 'variables': {
+ 'variables': {
+ 'variables': {
+ # Whether we're building a ChromeOS build.
+ 'chromeos%': 0,
+
+ # Whether or not we are using the Aura windowing framework.
+ 'use_aura%': 0,
+
+ # Whether or not we are building the Ash shell.
+ 'use_ash%': 0,
+ },
+ # Copy conditionally-set variables out one scope.
+ 'chromeos%': '<(chromeos)',
+ 'use_aura%': '<(use_aura)',
+ 'use_ash%': '<(use_ash)',
+
+ # Whether we are using Views Toolkit
+ 'toolkit_views%': 0,
+
+ # Use OpenSSL instead of NSS. Under development: see http://crbug.com/62803
+ 'use_openssl%': 0,
+
+ 'use_ibus%': 0,
+
+ # Disable viewport meta tag by default.
+ 'enable_viewport%': 0,
+
+ # Enable HiDPI support.
+ 'enable_hidpi%': 0,
+
+ # Enable touch optimized art assets and metrics.
+ 'enable_touch_ui%': 0,
+
+ # Is this change part of the android upstream bringup?
+ # Allows us to *temporarily* disable certain things for
+ # staging. Only set to 1 in a GYP_DEFINES.
+ 'android_upstream_bringup%': 0,
+
+ # Override buildtype to select the desired build flavor.
+ # Dev - everyday build for development/testing
+ # Official - release build (generally implies additional processing)
+ # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp
+ # conversion is done), some of the things which are now controlled by
+ # 'branding', such as symbol generation, will need to be refactored
+ # based on 'buildtype' (i.e. we don't care about saving symbols for
+ # non-Official builds).
+ 'buildtype%': 'Dev',
+
+ 'conditions': [
+ # ChromeOS implies ash.
+ ['chromeos==1', {
+ 'use_ash%': 1,
+ 'use_aura%': 1,
+ }],
+
+ # For now, Windows builds that |use_aura| should also imply using
+ # ash. This rule should be removed for the future when Windows is
+ # using the aura windows without the ash interface.
+ ['use_aura==1 and OS=="win"', {
+ 'use_ash%': 1,
+ }],
+ ['use_ash==1', {
+ 'use_aura%': 1,
+ }],
+
+ # A flag for BSD platforms
+ ['OS=="dragonfly" or OS=="freebsd" or OS=="netbsd" or \
+ OS=="openbsd"', {
+ 'os_bsd%': 1,
+ }, {
+ 'os_bsd%': 0,
+ }],
+ ],
+ },
+ # Copy conditionally-set variables out one scope.
+ 'chromeos%': '<(chromeos)',
+ 'use_aura%': '<(use_aura)',
+ 'use_ash%': '<(use_ash)',
+ 'os_bsd%': '<(os_bsd)',
+ 'use_openssl%': '<(use_openssl)',
+ 'use_ibus%': '<(use_ibus)',
+ 'enable_viewport%': '<(enable_viewport)',
+ 'enable_hidpi%': '<(enable_hidpi)',
+ 'enable_touch_ui%': '<(enable_touch_ui)',
+ 'android_upstream_bringup%': '<(android_upstream_bringup)',
+ 'buildtype%': '<(buildtype)',
+
+ # Sets whether we're building with the Android SDK/NDK (and hence with
+ # Ant, value 0), or as part of the Android system (and hence with the
+ # Android build system, value 1).
+ 'android_build_type%': 0,
+
+ # Compute the architecture that we're building on.
+ 'conditions': [
+ ['OS=="win" or OS=="ios"', {
+ 'host_arch%': 'ia32',
+ }, {
+ # This handles the Unix platforms for which there is some support.
+ # Anything else gets passed through, which probably won't work very
+ # well; such hosts should pass an explicit target_arch to gyp.
+ 'host_arch%':
+ '<!(uname -m | sed -e "s/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/i86pc/ia32/")',
+ }],
+
+ # Set default value of toolkit_views based on OS.
+ ['OS=="win" or chromeos==1 or use_aura==1', {
+ 'toolkit_views%': 1,
+ }, {
+ 'toolkit_views%': 0,
+ }],
+
+ # Set toolkit_uses_gtk for the Chromium browser on Linux.
+ ['(OS=="linux" or OS=="solaris" or os_bsd==1) and use_aura==0', {
+ 'toolkit_uses_gtk%': 1,
+ }, {
+ 'toolkit_uses_gtk%': 0,
+ }],
+
+ # Enable HiDPI on Mac OS and Chrome OS.
+ ['OS=="mac" or chromeos==1', {
+ 'enable_hidpi%': 1,
+ }],
+
+ # Enable touch UI on Metro.
+ ['OS=="win"', {
+ 'enable_touch_ui%': 1,
+ }],
+ ],
+ },
+
+ # Copy conditionally-set variables out one scope.
+ 'chromeos%': '<(chromeos)',
+ 'host_arch%': '<(host_arch)',
+ 'toolkit_views%': '<(toolkit_views)',
+ 'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+ 'use_aura%': '<(use_aura)',
+ 'use_ash%': '<(use_ash)',
+ 'os_bsd%': '<(os_bsd)',
+ 'use_openssl%': '<(use_openssl)',
+ 'use_ibus%': '<(use_ibus)',
+ 'enable_viewport%': '<(enable_viewport)',
+ 'enable_hidpi%': '<(enable_hidpi)',
+ 'enable_touch_ui%': '<(enable_touch_ui)',
+ 'android_upstream_bringup%': '<(android_upstream_bringup)',
+ 'android_build_type%': '<(android_build_type)',
+
+ # We used to provide a variable for changing how libraries were built.
+ # This variable remains until we can clean up all the users.
+ # This needs to be one nested variables dict in so that dependent
+ # gyp files can make use of it in their outer variables. (Yikes!)
+ # http://code.google.com/p/chromium/issues/detail?id=83308
+ 'library%': 'static_library',
+
+ # Override branding to select the desired branding flavor.
+ 'branding%': 'Chromium',
+
+ 'buildtype%': '<(buildtype)',
+
+ # Default architecture we're building for is the architecture we're
+ # building on.
+ 'target_arch%': '<(host_arch)',
+
+ # This variable tells WebCore.gyp and JavaScriptCore.gyp whether they are
+ # are built under a chromium full build (1) or a webkit.org chromium
+ # build (0).
+ 'inside_chromium_build%': 1,
+
+ # Set to 1 to enable fast builds. It disables debug info for fastest
+ # compilation.
+ 'fastbuild%': 0,
+
+ # Set to 1 to enable dcheck in release without having to use the flag.
+ 'dcheck_always_on%': 0,
+
+ # Disable file manager component extension by default.
+ 'file_manager_extension%': 0,
+
+ # Python version.
+ 'python_ver%': '2.6',
+
+ # Set ARM version (for libyuv)
+ 'arm_version%': 6,
+
+ # Set ARM-v7 compilation flags
+ 'armv7%': 0,
+
+ # Set Neon compilation flags (only meaningful if armv7==1).
+ 'arm_neon%': 1,
+ 'arm_neon_optional%': 0,
+
+ # The system root for cross-compiles. Default: none.
+ 'sysroot%': '',
+
+ # The system libdir used for this ABI.
+ 'system_libdir%': 'lib',
+
+ # On Linux, we build with sse2 for Chromium builds.
+ 'disable_sse2%': 0,
+
+ # Use libjpeg-turbo as the JPEG codec used by Chromium.
+ 'use_libjpeg_turbo%': 1,
+
+ # Use system libjpeg. Note that the system's libjpeg will be used even if
+ # use_libjpeg_turbo is set.
+ 'use_system_libjpeg%': 0,
+
+ # Use system libvpx
+ 'use_system_libvpx%': 0,
+
+ # Variable 'component' is for cases where we would like to build some
+ # components as dynamic shared libraries but still need variable
+ # 'library' for static libraries.
+ # By default, component is set to whatever library is set to and
+ # it can be overridden by the GYP command line or by ~/.gyp/include.gypi.
+ 'component%': 'static_library',
+
+ # Set to select the Title Case versions of strings in GRD files.
+ 'use_titlecase_in_grd_files%': 0,
+
+ # Use translations provided by volunteers at launchpad.net. This
+ # currently only works on Linux.
+ 'use_third_party_translations%': 0,
+
+ # Remoting compilation is enabled by default. Set to 0 to disable.
+ 'remoting%': 1,
+
+ # Configuration policy is enabled by default. Set to 0 to disable.
+ 'configuration_policy%': 1,
+
+ # Safe browsing is compiled in by default. Set to 0 to disable.
+ 'safe_browsing%': 1,
+
+ # Speech input is compiled in by default. Set to 0 to disable.
+ 'input_speech%': 1,
+
+ # Notifications are compiled in by default. Set to 0 to disable.
+ 'notifications%' : 1,
+
+ # If this is set, the clang plugins used on the buildbot will be used.
+ # Run tools/clang/scripts/update.sh to make sure they are compiled.
+ # This causes 'clang_chrome_plugins_flags' to be set.
+ # Has no effect if 'clang' is not set as well.
+ 'clang_use_chrome_plugins%': 1,
+
+ # Enable building with ASAN (Clang's -faddress-sanitizer option).
+ # -faddress-sanitizer only works with clang, but asan=1 implies clang=1
+ # See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
+ 'asan%': 0,
+
+ # Enable building with TSAN (Clang's -fthread-sanitizer option).
+ # -fthread-sanitizer only works with clang, but tsan=1 implies clang=1
+ # See http://clang.llvm.org/docs/ThreadSanitizer.html
+ 'tsan%': 0,
+
+ # Use a modified version of Clang to intercept allocated types and sizes
+ # for allocated objects. clang_type_profiler=1 implies clang=1.
+ # See http://dev.chromium.org/developers/deep-memory-profiler/cpp-object-type-identifier
+ # TODO(dmikurube): Support mac. See http://crbug.com/123758#c11
+ 'clang_type_profiler%': 0,
+
+ # Set to true to instrument the code with function call logger.
+ # See src/third_party/cygprofile/cyg-profile.cc for details.
+ 'order_profiling%': 0,
+
+ # Use the provided profiled order file to link Chrome image with it.
+ # This makes Chrome faster by better using CPU cache when executing code.
+ # This is known as PGO (profile guided optimization).
+ # See https://sites.google.com/a/google.com/chrome-msk/dev/boot-speed-up-effort
+ 'order_text_section%' : "",
+
+ # Set to 1 compile with -fPIC cflag on linux. This is a must for shared
+ # libraries on linux x86-64 and arm, plus ASLR.
+ 'linux_fpic%': 1,
+
+ # Whether one-click signin is enabled or not.
+ 'enable_one_click_signin%': 0,
+
+ # Enable Web Intents support in WebKit.
+ 'enable_web_intents%': 1,
+
+ # Enable Chrome browser extensions
+ 'enable_extensions%': 1,
+
+ # Enable browser automation.
+ 'enable_automation%': 1,
+
+ # Enable printing support and UI.
+ 'enable_printing%': 1,
+
+ # Enable Web Intents web content registration via HTML element
+ # and WebUI managing such registrations.
+ 'enable_web_intents_tag%': 0,
+
+ # Webrtc compilation is enabled by default. Set to 0 to disable.
+ 'enable_webrtc%': 1,
+
+ # PPAPI by default does not support plugins making calls off the main
+ # thread. Set to 1 to turn on experimental support for out-of-process
+ # plugins to make calls off the main thread.
+ 'enable_pepper_threading%': 0,
+
+ # Enables use of the session service, which is enabled by default.
+ # Support for disabling depends on the platform.
+ 'enable_session_service%': 1,
+
+ # Enables theme support, which is enabled by default. Support for
+ # disabling depends on the platform.
+ 'enable_themes%': 1,
+
+ # Uses OEM-specific wallpaper resources on Chrome OS.
+ 'use_oem_wallpaper%': 0,
+
+ # Enables support for background apps.
+ 'enable_background%': 1,
+
+ # Enable the task manager by default.
+ 'enable_task_manager%': 1,
+
+ # Enable FTP support by default.
+ 'disable_ftp_support%': 0,
+
+ # XInput2 multitouch support is disabled by default (use_xi2_mt=0).
+ # Setting to non-zero value enables XI2 MT. When XI2 MT is enabled,
+ # the input value also defines the required XI2 minor minimum version.
+ # For example, use_xi2_mt=2 means XI2.2 or above version is required.
+ 'use_xi2_mt%': 0,
+
+ # Use of precompiled headers on Windows.
+ #
+ # This is on by default in VS 2010, but off by default for VS
+ # 2008 because of complications that it can cause with our
+ # trybots etc.
+ #
+ # This variable may be explicitly set to 1 (enabled) or 0
+ # (disabled) in ~/.gyp/include.gypi or via the GYP command line.
+ # This setting will override the default.
+ #
+ # Note that a setting of 1 is probably suitable for most or all
+ # Windows developers using VS 2008, since precompiled headers
+ # provide a build speedup of 20-25%. There are a couple of
+ # small workarounds you may need to use when using VS 2008 (but
+ # not 2010), see
+ # http://code.google.com/p/chromium/wiki/WindowsPrecompiledHeaders
+ # for details.
+ 'chromium_win_pch%': 0,
+
+ # Set this to true when building with Clang.
+ # See http://code.google.com/p/chromium/wiki/Clang for details.
+ 'clang%': 0,
+
+ # Enable plug-in installation by default.
+ 'enable_plugin_installation%': 1,
+
+ # Enable protector service by default.
+ 'enable_protector_service%': 1,
+
+ # Specifies whether to use canvas_skia.cc in place of platform
+ # specific implementations of gfx::Canvas. Affects text drawing in the
+ # Chrome UI.
+ # TODO(asvitkine): Enable this on all platforms and delete this flag.
+ # http://crbug.com/105550
+ 'use_canvas_skia%': 0,
+
+ # Set to "tsan", "memcheck", or "drmemory" to configure the build to work
+ # with one of those tools.
+ 'build_for_tool%': '',
+
+ # Whether tests targets should be run, archived or just have the
+ # dependencies verified. All the tests targets have the '_run' suffix,
+ # e.g. base_unittests_run runs the target base_unittests. The test target
+ # always calls tools/swarm_client/isolate.py. See the script's --help for
+ # more information and the valid --mode values. Meant to be overridden with
+ # GYP_DEFINES.
+ # TODO(maruel): Converted the default from 'check' to 'noop' so work can
+ # be done while the builders are being reconfigured to check out test data
+ # files.
+ 'test_isolation_mode%': 'noop',
+ # It must not be '<(PRODUCT_DIR)' alone, the '/' is necessary otherwise
+ # gyp will remove duplicate flags, causing isolate.py to be confused.
+ 'test_isolation_outdir%': '<(PRODUCT_DIR)/isolate',
+
+ # Force rlz to use chrome's networking stack.
+ 'force_rlz_use_chrome_net%': 1,
+
+ 'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/x86',
+ 'wix_path%': '<(DEPTH)/third_party/wix',
+
+ 'conditions': [
+ # TODO(epoger): Figure out how to set use_skia=1 for Mac outside of
+ # the 'conditions' clause. Initial attempts resulted in chromium and
+ # webkit disagreeing on its setting.
+ ['OS=="mac"', {
+ 'use_skia%': 1,
+ }, {
+ 'use_skia%': 1,
+ }],
+
+ # A flag for POSIX platforms
+ ['OS=="win"', {
+ 'os_posix%': 0,
+ }, {
+ 'os_posix%': 1,
+ }],
+
+ # NSS usage.
+ ['(OS=="linux" or OS=="solaris" or os_bsd==1) and use_openssl==0', {
+ 'use_nss%': 1,
+ }, {
+ 'use_nss%': 0,
+ }],
+
+ # Flags to use X11 on non-Mac POSIX platforms
+ ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or moz_widget_toolkit_gonk==1', {
+ 'use_glib%': 0,
+ 'use_x11%': 0,
+ }, {
+ 'use_glib%': 1,
+ 'use_x11%': 1,
+ }],
+
+ # We always use skia text rendering in Aura on Windows, since GDI
+ # doesn't agree with our BackingStore.
+ # TODO(beng): remove once skia text rendering is on by default.
+ ['use_aura==1 and OS=="win"', {
+ 'enable_skia_text%': 1,
+ }],
+
+ # A flag to enable or disable our compile-time dependency
+ # on gnome-keyring. If that dependency is disabled, no gnome-keyring
+ # support will be available. This option is useful
+ # for Linux distributions and for Aura.
+ ['chromeos==1 or use_aura==1', {
+ 'use_gnome_keyring%': 0,
+ }, {
+ 'use_gnome_keyring%': 1,
+ }],
+
+ ['toolkit_uses_gtk==1 or OS=="mac" or OS=="ios"', {
+ # GTK+, Mac and iOS want Title Case strings
+ 'use_titlecase_in_grd_files%': 1,
+ }],
+
+ # Enable file manager extension on Chrome OS.
+ ['chromeos==1', {
+ 'file_manager_extension%': 1,
+ }, {
+ 'file_manager_extension%': 0,
+ }],
+
+ ['OS=="win" or OS=="mac" or (OS=="linux" and use_aura==0)', {
+ 'enable_one_click_signin%': 1,
+ }],
+
+ ['OS=="android"', {
+ 'enable_extensions%': 0,
+ 'enable_printing%': 0,
+ 'enable_themes%': 0,
+ 'enable_webrtc%': 0,
+ 'proprietary_codecs%': 1,
+ 'remoting%': 0,
+ }],
+
+ ['OS=="ios"', {
+ 'configuration_policy%': 0,
+ 'disable_ftp_support%': 1,
+ 'enable_automation%': 0,
+ 'enable_extensions%': 0,
+ 'enable_printing%': 0,
+ 'enable_themes%': 0,
+ 'enable_webrtc%': 0,
+ 'notifications%': 0,
+ 'remoting%': 0,
+ }],
+
+ # Use GPU accelerated cross process image transport by default
+ # on linux builds with the Aura window manager
+ ['use_aura==1 and OS=="linux"', {
+ 'ui_compositor_image_transport%': 1,
+ }, {
+ 'ui_compositor_image_transport%': 0,
+ }],
+
+ # Turn precompiled headers on by default for VS 2010.
+ ['OS=="win" and MSVS_VERSION=="2010" and buildtype!="Official"', {
+ 'chromium_win_pch%': 1
+ }],
+
+ ['use_aura==1 or chromeos==1 or OS=="android"', {
+ 'enable_plugin_installation%': 0,
+ }, {
+ 'enable_plugin_installation%': 1,
+ }],
+
+ ['OS=="android" or OS=="ios"', {
+ 'enable_protector_service%': 0,
+ }, {
+ 'enable_protector_service%': 1,
+ }],
+
+ # linux_use_gold_binary: whether to use the binary checked into
+ # third_party/gold.
+ ['OS=="linux"', {
+ 'linux_use_gold_binary%': 1,
+ }, {
+ 'linux_use_gold_binary%': 0,
+ }],
+
+ # linux_use_gold_flags: whether to use build flags that rely on gold.
+ # On by default for x64 Linux. Temporarily off for ChromeOS as
+ # it failed on a buildbot.
+ ['OS=="linux" and chromeos==0', {
+ 'linux_use_gold_flags%': 1,
+ }, {
+ 'linux_use_gold_flags%': 0,
+ }],
+
+ ['OS=="android"', {
+ 'enable_captive_portal_detection%': 0,
+ }, {
+ 'enable_captive_portal_detection%': 1,
+ }],
+
+ # Enable Skia UI text drawing incrementally on different platforms.
+ # http://crbug.com/105550
+ #
+ # On Aura, this allows per-tile painting to be used in the browser
+ # compositor.
+ ['OS!="mac" and OS!="android"', {
+ 'use_canvas_skia%': 1,
+ }],
+
+ ['chromeos==1', {
+ # When building for ChromeOS we don't want Chromium to use libjpeg_turbo.
+ 'use_libjpeg_turbo%': 0,
+ }],
+
+ ['OS=="android"', {
+ # When building as part of the Android system, use system libraries
+ # where possible to reduce ROM size.
+ 'use_system_libjpeg%': '<(android_build_type)',
+ }],
+ ],
+
+ # Set this to 1 to use the Google-internal file containing
+ # official API keys for Google Chrome even in a developer build.
+ # Setting this variable explicitly to 1 will cause your build to
+ # fail if the internal file is missing.
+ #
+ # Set this to 0 to not use the internal file, even when it
+ # exists in your checkout.
+ #
+ # Leave set to 2 to have this variable implicitly set to 1 if
+ # you have src/google_apis/internal/google_chrome_api_keys.h in
+ # your checkout, and implicitly set to 0 if not.
+ #
+ # Note that official builds always behave as if this variable
+ # was explicitly set to 1, i.e. they always use official keys,
+ # and will fail to build if the internal file is missing.
+ 'use_official_google_api_keys%': 2,
+
+ # Set these to bake the specified API keys and OAuth client
+ # IDs/secrets into your build.
+ #
+ # If you create a build without values baked in, you can instead
+ # set environment variables to provide the keys at runtime (see
+ # src/google_apis/google_api_keys.h for details). Features that
+ # require server-side APIs may fail to work if no keys are
+ # provided.
+ #
+ # Note that if you are building an official build or if
+ # use_official_google_api_keys has been set to 1 (explicitly or
+ # implicitly), these values will be ignored and the official
+ # keys will be used instead.
+ 'google_api_key%': '',
+ 'google_default_client_id%': '',
+ 'google_default_client_secret%': '',
+ },
+
+ # Copy conditionally-set variables out one scope.
+ 'branding%': '<(branding)',
+ 'buildtype%': '<(buildtype)',
+ 'target_arch%': '<(target_arch)',
+ 'host_arch%': '<(host_arch)',
+ 'library%': 'static_library',
+ 'toolkit_views%': '<(toolkit_views)',
+ 'ui_compositor_image_transport%': '<(ui_compositor_image_transport)',
+ 'use_aura%': '<(use_aura)',
+ 'use_ash%': '<(use_ash)',
+ 'use_openssl%': '<(use_openssl)',
+ 'use_ibus%': '<(use_ibus)',
+ 'use_nss%': '<(use_nss)',
+ 'os_bsd%': '<(os_bsd)',
+ 'os_posix%': '<(os_posix)',
+ 'use_glib%': '<(use_glib)',
+ 'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+ 'use_skia%': '<(use_skia)',
+ 'use_x11%': '<(use_x11)',
+ 'use_gnome_keyring%': '<(use_gnome_keyring)',
+ 'linux_fpic%': '<(linux_fpic)',
+ 'enable_pepper_threading%': '<(enable_pepper_threading)',
+ 'chromeos%': '<(chromeos)',
+ 'enable_viewport%': '<(enable_viewport)',
+ 'enable_hidpi%': '<(enable_hidpi)',
+ 'enable_touch_ui%': '<(enable_touch_ui)',
+ 'use_xi2_mt%':'<(use_xi2_mt)',
+ 'file_manager_extension%': '<(file_manager_extension)',
+ 'inside_chromium_build%': '<(inside_chromium_build)',
+ 'fastbuild%': '<(fastbuild)',
+ 'dcheck_always_on%': '<(dcheck_always_on)',
+ 'python_ver%': '<(python_ver)',
+ 'arm_version%': '<(arm_version)',
+ 'armv7%': '<(armv7)',
+ 'arm_neon%': '<(arm_neon)',
+ 'arm_neon_optional%': '<(arm_neon_optional)',
+ 'sysroot%': '<(sysroot)',
+ 'system_libdir%': '<(system_libdir)',
+ 'component%': '<(component)',
+ 'use_titlecase_in_grd_files%': '<(use_titlecase_in_grd_files)',
+ 'use_third_party_translations%': '<(use_third_party_translations)',
+ 'remoting%': '<(remoting)',
+ 'enable_one_click_signin%': '<(enable_one_click_signin)',
+ 'enable_webrtc%': '<(enable_webrtc)',
+ 'chromium_win_pch%': '<(chromium_win_pch)',
+ 'configuration_policy%': '<(configuration_policy)',
+ 'safe_browsing%': '<(safe_browsing)',
+ 'input_speech%': '<(input_speech)',
+ 'notifications%': '<(notifications)',
+ 'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)',
+ 'asan%': '<(asan)',
+ 'tsan%': '<(tsan)',
+ 'clang_type_profiler%': '<(clang_type_profiler)',
+ 'order_profiling%': '<(order_profiling)',
+ 'order_text_section%': '<(order_text_section)',
+ 'enable_extensions%': '<(enable_extensions)',
+ 'enable_web_intents%': '<(enable_web_intents)',
+ 'enable_web_intents_tag%': '<(enable_web_intents_tag)',
+ 'enable_plugin_installation%': '<(enable_plugin_installation)',
+ 'enable_protector_service%': '<(enable_protector_service)',
+ 'enable_session_service%': '<(enable_session_service)',
+ 'enable_themes%': '<(enable_themes)',
+ 'use_oem_wallpaper%': '<(use_oem_wallpaper)',
+ 'enable_background%': '<(enable_background)',
+ 'linux_use_gold_binary%': '<(linux_use_gold_binary)',
+ 'linux_use_gold_flags%': '<(linux_use_gold_flags)',
+ 'use_canvas_skia%': '<(use_canvas_skia)',
+ 'test_isolation_mode%': '<(test_isolation_mode)',
+ 'test_isolation_outdir%': '<(test_isolation_outdir)',
+ 'enable_automation%': '<(enable_automation)',
+ 'enable_printing%': '<(enable_printing)',
+ 'enable_captive_portal_detection%': '<(enable_captive_portal_detection)',
+ 'disable_ftp_support%': '<(disable_ftp_support)',
+ 'force_rlz_use_chrome_net%': '<(force_rlz_use_chrome_net)',
+ 'enable_task_manager%': '<(enable_task_manager)',
+ 'sas_dll_path%': '<(sas_dll_path)',
+ 'wix_path%': '<(wix_path)',
+ 'android_upstream_bringup%': '<(android_upstream_bringup)',
+ 'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+ 'use_system_libjpeg%': '<(use_system_libjpeg)',
+ 'android_build_type%': '<(android_build_type)',
+ 'use_official_google_api_keys%': '<(use_official_google_api_keys)',
+ 'google_api_key%': '<(google_api_key)',
+ 'google_default_client_id%': '<(google_default_client_id)',
+ 'google_default_client_secret%': '<(google_default_client_secret)',
+
+ # Use system yasm instead of bundled one.
+ 'use_system_yasm%': 0,
+
+ # Default to enabled PIE; this is important for ASLR but we may need to be
+ # able to turn it off for various reasons.
+ 'linux_disable_pie%': 0,
+
+ # The release channel that this build targets. This is used to restrict
+ # channel-specific build options, like which installer packages to create.
+ # The default is 'all', which does no channel-specific filtering.
+ 'channel%': 'all',
+
+ # Override chromium_mac_pch and set it to 0 to suppress the use of
+ # precompiled headers on the Mac. Prefix header injection may still be
+ # used, but prefix headers will not be precompiled. This is useful when
+ # using distcc to distribute a build to compile slaves that don't
+ # share the same compiler executable as the system driving the compilation,
+ # because precompiled headers rely on pointers into a specific compiler
+ # executable's image. Setting this to 0 is needed to use an experimental
+ # Linux-Mac cross compiler distcc farm.
+ 'chromium_mac_pch%': 1,
+
+ # The default value for mac_strip in target_defaults. This cannot be
+ # set there, per the comment about variable% in a target_defaults.
+ 'mac_strip_release%': 1,
+
+ # Set to 1 to enable code coverage. In addition to build changes
+ # (e.g. extra CFLAGS), also creates a new target in the src/chrome
+ # project file called "coverage".
+ # Currently ignored on Windows.
+ 'coverage%': 0,
+
+ # Set to 1 to force Visual C++ to use legacy debug information format /Z7.
+ # This is useful for parallel compilation tools which can't support /Zi.
+ # Only used on Windows.
+ 'win_z7%' : 0,
+
+ # Although base/allocator lets you select a heap library via an
+ # environment variable, the libcmt shim it uses sometimes gets in
+ # the way. To disable it entirely, and switch to normal msvcrt, do e.g.
+ # 'win_use_allocator_shim': 0,
+ # 'win_release_RuntimeLibrary': 2
+ # to ~/.gyp/include.gypi, gclient runhooks --force, and do a release build.
+ 'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
+
+ # Whether usage of OpenMAX is enabled.
+ 'enable_openmax%': 0,
+
+ # Whether proprietary audio/video codecs are assumed to be included with
+ # this build (only meaningful if branding!=Chrome).
+ 'proprietary_codecs%': 0,
+
+ # TODO(bradnelson): eliminate this when possible.
+ # To allow local gyp files to prevent release.vsprops from being included.
+ # Yes(1) means include release.vsprops.
+ # Once all vsprops settings are migrated into gyp, this can go away.
+ 'msvs_use_common_release%': 1,
+
+ # TODO(bradnelson): eliminate this when possible.
+ # To allow local gyp files to override additional linker options for msvs.
+ # Yes(1) means set use the common linker options.
+ 'msvs_use_common_linker_extras%': 1,
+
+ # TODO(sgk): eliminate this if possible.
+ # It would be nicer to support this via a setting in 'target_defaults'
+ # in chrome/app/locales/locales.gypi overriding the setting in the
+ # 'Debug' configuration in the 'target_defaults' dict below,
+ # but that doesn't work as we'd like.
+ 'msvs_debug_link_incremental%': '2',
+
+ # Needed for some of the largest modules.
+ 'msvs_debug_link_nonincremental%': '1',
+
+ # Turns on Use Library Dependency Inputs for linking chrome.dll on Windows
+ # to get incremental linking to be faster in debug builds.
+ 'incremental_chrome_dll%': '0',
+
+ # The default settings for third party code for treating
+ # warnings-as-errors. Ideally, this would not be required, however there
+ # is some third party code that takes a long time to fix/roll. So, this
+ # flag allows us to have warnings as errors in general to prevent
+ # regressions in most modules, while working on the bits that are
+ # remaining.
+ 'win_third_party_warn_as_error%': 'true',
+
+ # This is the location of the sandbox binary. Chrome looks for this before
+ # running the zygote process. If found, and SUID, it will be used to
+ # sandbox the zygote process and, thus, all renderer processes.
+ 'linux_sandbox_path%': '',
+
+ # Set this to true to enable SELinux support.
+ 'selinux%': 0,
+
+ # Clang stuff.
+ 'clang%': '<(clang)',
+ 'make_clang_dir%': 'third_party/llvm-build/Release+Asserts',
+
+ # These two variables can be set in GYP_DEFINES while running
+ # |gclient runhooks| to let clang run a plugin in every compilation.
+ # Only has an effect if 'clang=1' is in GYP_DEFINES as well.
+ # Example:
+ # GYP_DEFINES='clang=1 clang_load=/abs/path/to/libPrintFunctionNames.dylib clang_add_plugin=print-fns' gclient runhooks
+
+ 'clang_load%': '',
+ 'clang_add_plugin%': '',
+
+ # The default type of gtest.
+ 'gtest_target_type%': 'executable',
+
+ # Enable sampling based profiler.
+ # See http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html
+ 'profiling%': '0',
+
+ # Enable strict glibc debug mode.
+ 'glibcxx_debug%': 0,
+
+ # Override whether we should use Breakpad on Linux. I.e. for Chrome bot.
+ 'linux_breakpad%': 0,
+ # And if we want to dump symbols for Breakpad-enabled builds.
+ 'linux_dump_symbols%': 0,
+ # And if we want to strip the binary after dumping symbols.
+ 'linux_strip_binary%': 0,
+ # Strip the test binaries needed for Linux reliability tests.
+ 'linux_strip_reliability_tests%': 0,
+
+ # Enable TCMalloc.
+ 'linux_use_tcmalloc%': 1,
+
+ # Disable TCMalloc's debugallocation.
+ 'linux_use_debugallocation%': 0,
+
+ # Disable TCMalloc's heapchecker.
+ 'linux_use_heapchecker%': 0,
+
+ # Disable shadow stack keeping used by heapcheck to unwind the stacks
+ # better.
+ 'linux_keep_shadow_stacks%': 0,
+
+ # Set to 1 to link against libgnome-keyring instead of using dlopen().
+ 'linux_link_gnome_keyring%': 0,
+ # Set to 1 to link against gsettings APIs instead of using dlopen().
+ 'linux_link_gsettings%': 0,
+
+ # Set Thumb compilation flags.
+ 'arm_thumb%': 0,
+
+ # Set ARM fpu compilation flags (only meaningful if armv7==1 and
+ # arm_neon==0).
+ 'arm_fpu%': 'vfpv3',
+
+ # Set ARM float abi compilation flag.
+ 'arm_float_abi%': 'softfp',
+
+ # Enable new NPDevice API.
+ 'enable_new_npdevice_api%': 0,
+
+ # Enable EGLImage support in OpenMAX
+ 'enable_eglimage%': 1,
+
+ # Enable a variable used elsewhere throughout the GYP files to determine
+ # whether to compile in the sources for the GPU plugin / process.
+ 'enable_gpu%': 1,
+
+ # .gyp files or targets should set chromium_code to 1 if they build
+ # Chromium-specific code, as opposed to external code. This variable is
+ # used to control such things as the set of warnings to enable, and
+ # whether warnings are treated as errors.
+ 'chromium_code%': 0,
+
+ 'release_valgrind_build%': 0,
+
+ # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+ 'enable_wexit_time_destructors%': 0,
+
+ # Set to 1 to compile with the built in pdf viewer.
+ 'internal_pdf%': 0,
+
+ # Set to 1 to compile with the OpenGL ES 2.0 conformance tests.
+ 'internal_gles2_conform_tests%': 0,
+
+ # NOTE: When these end up in the Mac bundle, we need to replace '-' for '_'
+ # so Cocoa is happy (http://crbug.com/20441).
+ 'locales': [
+ 'am', 'ar', 'bg', 'bn', 'ca', 'cs', 'da', 'de', 'el', 'en-GB',
+ 'en-US', 'es-419', 'es', 'et', 'fa', 'fi', 'fil', 'fr', 'gu', 'he',
+ 'hi', 'hr', 'hu', 'id', 'it', 'ja', 'kn', 'ko', 'lt', 'lv',
+ 'ml', 'mr', 'ms', 'nb', 'nl', 'pl', 'pt-BR', 'pt-PT', 'ro', 'ru',
+ 'sk', 'sl', 'sr', 'sv', 'sw', 'ta', 'te', 'th', 'tr', 'uk',
+ 'vi', 'zh-CN', 'zh-TW',
+ ],
+
+ # Pseudo locales are special locales which are used for testing and
+ # debugging. They don't get copied to the final app. For more info,
+ # check out https://sites.google.com/a/chromium.org/dev/Home/fake-bidi
+ 'pseudo_locales': [
+ 'fake-bidi',
+ ],
+
+ 'grit_defines': [],
+
+ # If debug_devtools is set to 1, JavaScript files for DevTools are
+ # stored as is and loaded from disk. Otherwise, a concatenated file
+ # is stored in resources.pak. It is still possible to load JS files
+ # from disk by passing --debug-devtools cmdline switch.
+ 'debug_devtools%': 0,
+
+ # The Java Bridge is not compiled in by default.
+ 'java_bridge%': 0,
+
+ # Code signing for iOS binaries. The bots need to be able to disable this.
+ 'chromium_ios_signing%': 1,
+
+ # This flag is only used when disable_nacl==0 and disables all those
+ # subcomponents which would require the installation of a native_client
+ # untrusted toolchain.
+ 'disable_nacl_untrusted%': 0,
+
+ # Disable Dart by default.
+ 'enable_dart%': 0,
+
+ # The desired version of Windows SDK can be set in ~/.gyp/include.gypi.
+ 'msbuild_toolset%': '',
+
+ # Native Client is enabled by default.
+ 'disable_nacl%': 0,
+
+ # Whether to build full debug version for Debug configuration on Android.
+ # Compared to full debug version, the default Debug configuration on Android
+ # has no full v8 debug, has size optimization and linker gc section, so that
+ # we can build a debug version with acceptable size and performance.
+ 'android_full_debug%': 0,
+
+ # Sets the default version name and code for Android app, by default we
+ # do a developer build.
+ 'android_app_version_name%': 'Developer Build',
+ 'android_app_version_code%': 0,
+
+ 'sas_dll_exists': '<!(<(PYTHON) <(DEPTH)/build/dir_exists.py <(sas_dll_path))',
+ 'wix_exists': '<!(<(PYTHON) <(DEPTH)/build/dir_exists.py <(wix_path))',
+
+ 'windows_sdk_default_path': '<(DEPTH)/third_party/platformsdk_win8/files',
+ 'directx_sdk_default_path': '<(DEPTH)/third_party/directxsdk/files',
+
+ 'conditions': [
+ ['"<!(<(PYTHON) <(DEPTH)/build/dir_exists.py <(windows_sdk_default_path))"=="True"', {
+ 'windows_sdk_path%': '<(windows_sdk_default_path)',
+ }, {
+ 'windows_sdk_path%': 'C:/Program Files (x86)/Windows Kits/8.0',
+ }],
+ ['OS=="win" and "<!(<(PYTHON) <(DEPTH)/build/dir_exists.py <(directx_sdk_default_path))"=="True"', {
+ 'directx_sdk_path%': '<(directx_sdk_default_path)',
+ }, {
+ 'directx_sdk_path%': '$(DXSDK_DIR)',
+ }],
+ # If use_official_google_api_keys is already set (to 0 or 1), we
+ # do none of the implicit checking. If it is set to 1 and the
+ # internal keys file is missing, the build will fail at compile
+ # time. If it is set to 0 and keys are not provided by other
+ # means, a warning will be printed at compile time.
+ ['use_official_google_api_keys==2', {
+ 'use_official_google_api_keys%':
+ '<!(<(PYTHON) <(DEPTH)/google_apis/build/check_internal.py <(DEPTH)/google_apis/internal/google_chrome_api_keys.h)',
+ }],
+ ['os_posix==1 and OS!="mac" and OS!="ios"', {
+ # Figure out the python architecture to decide if we build pyauto.
+ # disabled for mozilla because windows != mac and this runs a shell script
+ # 'python_arch%': '<!(<(DEPTH)/build/linux/python_arch.sh <(sysroot)/usr/<(system_libdir)/libpython<(python_ver).so.1.0)',
+ 'conditions': [
+ # TODO(glider): set clang to 1 earlier for ASan and TSan builds so
+ # that it takes effect here.
+ # disabled for Mozilla since it doesn't use this, and 'msys' messes $(CXX) up
+ ['build_with_mozilla==0 and clang==0 and asan==0 and tsan==0', {
+ # This will set gcc_version to XY if you are running gcc X.Y.*.
+ 'gcc_version%': '<!(<(PYTHON) <(DEPTH)/build/compiler_version.py)',
+ }, {
+ 'gcc_version%': 0,
+ }],
+ ['branding=="Chrome"', {
+ 'linux_breakpad%': 1,
+ }],
+ # All Chrome builds have breakpad symbols, but only process the
+ # symbols from official builds.
+ ['(branding=="Chrome" and buildtype=="Official")', {
+ 'linux_dump_symbols%': 1,
+ }],
+ ],
+ }], # os_posix==1 and OS!="mac" and OS!="ios"
+ ['OS=="ios"', {
+ 'disable_nacl%': 1,
+ 'enable_gpu%': 0,
+ 'icu_use_data_file_flag%': 1,
+ 'use_system_bzip2%': 1,
+ 'use_system_libxml%': 1,
+ 'use_system_sqlite%': 1,
+
+ # The Mac SDK is set for iOS builds and passed through to Mac
+ # sub-builds. This allows the Mac sub-build SDK in an iOS build to be
+ # overridden from the command line the same way it is for a Mac build.
+ 'mac_sdk%': '<!(<(PYTHON) <(DEPTH)/build/mac/find_sdk.py 10.6)',
+
+ # iOS SDK and deployment target support. The iOS 5.0 SDK is actually
+ # what is required, but the value is left blank so when it is set in
+ # the project files it will be the "current" iOS SDK. Forcing 5.0
+ # even though it is "current" causes Xcode to spit out a warning for
+ # every single project file for not using the "current" SDK.
+ 'ios_sdk%': '',
+ 'ios_sdk_path%': '',
+ 'ios_deployment_target%': '4.3',
+
+ 'conditions': [
+ # ios_product_name is set to the name of the .app bundle as it should
+ # appear on disk.
+ ['branding=="Chrome"', {
+ 'ios_product_name%': 'Chrome',
+ }, { # else: branding!="Chrome"
+ 'ios_product_name%': 'Chromium',
+ }],
+ ['branding=="Chrome" and buildtype=="Official"', {
+ 'ios_breakpad%': 1,
+ }, { # else: branding!="Chrome" or buildtype!="Official"
+ 'ios_breakpad%': 0,
+ }],
+ ],
+ }], # OS=="ios"
+ ['OS=="android"', {
+ # Location of Android NDK.
+ 'variables': {
+ 'variables': {
+ 'variables': {
+ 'android_ndk_root%': '<!(/bin/echo -n $ANDROID_NDK_ROOT)',
+ },
+ 'android_ndk_root%': '<(android_ndk_root)',
+ 'conditions': [
+ ['target_arch == "ia32"', {
+ 'android_app_abi%': 'x86',
+ 'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-9/arch-x86',
+ }],
+ ['target_arch=="arm"', {
+ 'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-9/arch-arm',
+ 'conditions': [
+ ['armv7==0', {
+ 'android_app_abi%': 'armeabi',
+ }, {
+ 'android_app_abi%': 'armeabi-v7a',
+ }],
+ ],
+ }],
+ ['target_arch=="arm64"', {
+ 'android_app_abi%': 'arm64-v8a',
+ 'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-arm64',
+ }],
+ ],
+ },
+ 'android_ndk_root%': '<(android_ndk_root)',
+ 'android_app_abi%': '<(android_app_abi)',
+ 'android_ndk_sysroot%': '<(android_ndk_sysroot)',
+ },
+ 'android_ndk_root%': '<(android_ndk_root)',
+ 'android_ndk_sysroot': '<(android_ndk_sysroot)',
+ 'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+ 'android_ndk_lib': '<(android_ndk_sysroot)/usr/lib',
+ 'android_app_abi%': '<(android_app_abi)',
+
+ # Location of the "strip" binary, used by both gyp and scripts.
+ 'android_strip%' : '<!(/bin/echo -n <(android_toolchain)/*-strip)',
+
+ # Provides an absolute path to PRODUCT_DIR (e.g. out/Release). Used
+ # to specify the output directory for Ant in the Android build.
+ 'ant_build_out': '`cd <(PRODUCT_DIR) && pwd -P`',
+
+ # Uses Android's crash report system
+ 'linux_breakpad%': 0,
+
+ # Always uses openssl.
+ 'use_openssl%': 1,
+
+ 'proprietary_codecs%': '<(proprietary_codecs)',
+ 'enable_task_manager%': 0,
+ 'safe_browsing%': 0,
+ 'configuration_policy%': 0,
+ 'input_speech%': 0,
+ 'enable_web_intents%': 0,
+ 'enable_automation%': 0,
+ 'java_bridge%': 1,
+ 'build_ffmpegsumo%': 0,
+ 'linux_use_tcmalloc%': 0,
+
+ # Disable Native Client.
+ 'disable_nacl%': 1,
+
+ # Android does not support background apps.
+ 'enable_background%': 0,
+
+ # Sessions are stored separately on the Java side.
+ 'enable_session_service%': 0,
+
+ # Set to 1 once we have a notification system for Android.
+ # http://crbug.com/115320
+ 'notifications%': 0,
+
+ 'p2p_apis%' : 0,
+
+ # TODO(jrg): when 'gtest_target_type'=='shared_library' and
+ # OS==android, make all gtest_targets depend on
+ # testing/android/native_test.gyp:native_test_apk.
+ 'gtest_target_type%': 'shared_library',
+
+ # Uses system APIs for decoding audio and video.
+ 'use_libffmpeg%': '0',
+
+ # Always use the chromium skia. The use_system_harfbuzz needs to
+ # match use_system_skia.
+ 'use_system_skia%': '0',
+ 'use_system_harfbuzz%': '0',
+
+ # Configure crash reporting and build options based on release type.
+ 'conditions': [
+ ['buildtype=="Official"', {
+ # Only report crash dumps for Official builds.
+ 'linux_breakpad%': 1,
+ }, {
+ 'linux_breakpad%': 0,
+ }],
+ ],
+
+ # When building as part of the Android system, use system libraries
+ # where possible to reduce ROM size.
+ # TODO(steveblock): Investigate using the system version of sqlite.
+ 'use_system_sqlite%': 0, # '<(android_build_type)',
+ 'use_system_expat%': '<(android_build_type)',
+ 'use_system_icu%': '<(android_build_type)',
+ 'use_system_stlport%': '<(android_build_type)',
+
+ # Copy it out one scope.
+ 'android_build_type%': '<(android_build_type)',
+ }], # OS=="android"
+ ['OS=="mac"', {
+ 'variables': {
+ # Mac OS X SDK and deployment target support. The SDK identifies
+ # the version of the system headers that will be used, and
+ # corresponds to the MAC_OS_X_VERSION_MAX_ALLOWED compile-time
+ # macro. "Maximum allowed" refers to the operating system version
+ # whose APIs are available in the headers. The deployment target
+ # identifies the minimum system version that the built products are
+ # expected to function on. It corresponds to the
+ # MAC_OS_X_VERSION_MIN_REQUIRED compile-time macro. To ensure these
+ # macros are available, #include <AvailabilityMacros.h>. Additional
+ # documentation on these macros is available at
+ # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3
+ # Chrome normally builds with the Mac OS X 10.6 SDK and sets the
+ # deployment target to 10.6. Other projects, such as O3D, may
+ # override these defaults.
+
+ # Normally, mac_sdk_min is used to find an SDK that Xcode knows
+ # about that is at least the specified version. In official builds,
+ # the SDK must match mac_sdk_min exactly. If the SDK is installed
+ # someplace that Xcode doesn't know about, set mac_sdk_path to the
+ # path to the SDK; when set to a non-empty string, SDK detection
+ # based on mac_sdk_min will be bypassed entirely.
+ 'mac_sdk_min%': '10.6',
+ 'mac_sdk_path%': '',
+
+ 'mac_deployment_target%': '10.6',
+ },
+
+ 'mac_sdk_min': '<(mac_sdk_min)',
+ 'mac_sdk_path': '<(mac_sdk_path)',
+ 'mac_deployment_target': '<(mac_deployment_target)',
+
+ # Enable clang on mac by default!
+ 'clang%': 1,
+
+ # Compile in Breakpad support by default so that it can be
+ # tested, even if it is not enabled by default at runtime.
+ 'mac_breakpad_compiled_in%': 1,
+ 'conditions': [
+ # mac_product_name is set to the name of the .app bundle as it should
+ # appear on disk. This duplicates data from
+ # chrome/app/theme/chromium/BRANDING and
+ # chrome/app/theme/google_chrome/BRANDING, but is necessary to get
+ # these names into the build system.
+ ['branding=="Chrome"', {
+ 'mac_product_name%': 'Google Chrome',
+ }, { # else: branding!="Chrome"
+ 'mac_product_name%': 'Chromium',
+ }],
+
+ ['branding=="Chrome" and buildtype=="Official"', {
+ 'mac_sdk%': '<!(<(PYTHON) <(DEPTH)/build/mac/find_sdk.py --verify <(mac_sdk_min) --sdk_path=<(mac_sdk_path))',
+ # Enable uploading crash dumps.
+ 'mac_breakpad_uploads%': 1,
+ # Enable dumping symbols at build time for use by Mac Breakpad.
+ 'mac_breakpad%': 1,
+ # Enable Keystone auto-update support.
+ 'mac_keystone%': 1,
+ }, { # else: branding!="Chrome" or buildtype!="Official"
+ 'mac_sdk%': '<!(<(PYTHON) <(DEPTH)/build/mac/find_sdk.py <(mac_sdk_min))',
+ 'mac_breakpad_uploads%': 0,
+ 'mac_breakpad%': 0,
+ 'mac_keystone%': 0,
+ }],
+ ],
+ }], # OS=="mac"
+
+ ['OS=="win"', {
+ 'conditions': [
+ ['component=="shared_library"', {
+ 'win_use_allocator_shim%': 0,
+ }],
+ ['component=="shared_library" and "<(GENERATOR)"=="ninja"', {
+ # Only enabled by default for ninja because it's buggy in VS.
+ # Not enabled for component=static_library because some targets
+ # are too large and the toolchain fails due to the size of the
+ # .obj files.
+ 'incremental_chrome_dll%': 1,
+ }],
+ # Don't do incremental linking for large modules on 32-bit.
+ ['MSVS_OS_BITS==32', {
+ 'msvs_large_module_debug_link_mode%': '1', # No
+ },{
+ 'msvs_large_module_debug_link_mode%': '2', # Yes
+ }],
+ ['MSVS_VERSION=="2010e" or MSVS_VERSION=="2008e" or MSVS_VERSION=="2005e"', {
+ 'msvs_express%': 1,
+ 'secure_atl%': 0,
+ },{
+ 'msvs_express%': 0,
+ 'secure_atl%': 1,
+ }],
+ ],
+ 'nacl_win64_defines': [
+ # This flag is used to minimize dependencies when building
+ # Native Client loader for 64-bit Windows.
+ 'NACL_WIN64',
+ ],
+ }],
+
+ ['os_posix==1 and chromeos==0 and OS!="android"', {
+ 'use_cups%': 1,
+ }, {
+ 'use_cups%': 0,
+ }],
+
+ # Native Client glibc toolchain is enabled by default except on arm.
+ ['target_arch=="arm"', {
+ 'disable_glibc%': 1,
+ }, {
+ 'disable_glibc%': 0,
+ }],
+
+ # Disable SSE2 when building for ARM or MIPS.
+ ['target_arch=="arm" or target_arch=="mipsel"', {
+ 'disable_sse2%': 1,
+ }, {
+ 'disable_sse2%': '<(disable_sse2)',
+ }],
+
+ # Set the relative path from this file to the GYP file of the JPEG
+ # library used by Chromium.
+ ['use_system_libjpeg==1 or use_libjpeg_turbo==0', {
+ # Configuration for using the system libjpeg is here.
+ 'libjpeg_gyp_path': '../third_party/libjpeg/libjpeg.gyp',
+ }, {
+ 'libjpeg_gyp_path': '../third_party/libjpeg_turbo/libjpeg.gyp',
+ }],
+
+ # Options controlling the use of GConf (the classic GNOME configuration
+ # system) and GIO, which contains GSettings (the new GNOME config system).
+ ['chromeos==1', {
+ 'use_gconf%': 0,
+ 'use_gio%': 0,
+ }, {
+ 'use_gconf%': 1,
+ 'use_gio%': 1,
+ }],
+
+ # Set up -D and -E flags passed into grit.
+ ['branding=="Chrome"', {
+ # TODO(mmoss) The .grd files look for _google_chrome, but for
+ # consistency they should look for google_chrome_build like C++.
+ 'grit_defines': ['-D', '_google_chrome',
+ '-E', 'CHROMIUM_BUILD=google_chrome'],
+ }, {
+ 'grit_defines': ['-D', '_chromium',
+ '-E', 'CHROMIUM_BUILD=chromium'],
+ }],
+ ['chromeos==1', {
+ 'grit_defines': ['-D', 'chromeos', '-D', 'scale_factors=2x'],
+ }],
+ ['toolkit_views==1', {
+ 'grit_defines': ['-D', 'toolkit_views'],
+ }],
+ ['use_aura==1', {
+ 'grit_defines': ['-D', 'use_aura'],
+ }],
+ ['use_ash==1', {
+ 'grit_defines': ['-D', 'use_ash'],
+ }],
+ ['use_nss==1', {
+ 'grit_defines': ['-D', 'use_nss'],
+ }],
+ ['file_manager_extension==1', {
+ 'grit_defines': ['-D', 'file_manager_extension'],
+ }],
+ ['remoting==1', {
+ 'grit_defines': ['-D', 'remoting'],
+ }],
+ ['use_titlecase_in_grd_files==1', {
+ 'grit_defines': ['-D', 'use_titlecase'],
+ }],
+ ['use_third_party_translations==1', {
+ 'grit_defines': ['-D', 'use_third_party_translations'],
+ 'locales': [
+ 'ast', 'bs', 'ca@valencia', 'en-AU', 'eo', 'eu', 'gl', 'hy', 'ia',
+ 'ka', 'ku', 'kw', 'ms', 'ug'
+ ],
+ }],
+ ['OS=="android"', {
+ 'grit_defines': ['-D', 'android'],
+ }],
+ ['OS=="mac"', {
+ 'grit_defines': ['-D', 'scale_factors=2x'],
+ }],
+ ['OS == "ios"', {
+ 'grit_defines': [
+ # define for iOS specific resources.
+ '-D', 'ios',
+ # iOS uses a whitelist to filter resources.
+ '-w', '<(DEPTH)/build/ios/grit_whitelist.txt'
+ ],
+ }],
+ ['enable_extensions==1', {
+ 'grit_defines': ['-D', 'enable_extensions'],
+ }],
+ ['enable_printing==1', {
+ 'grit_defines': ['-D', 'enable_printing'],
+ }],
+ ['enable_themes==1', {
+ 'grit_defines': ['-D', 'enable_themes'],
+ }],
+ ['use_oem_wallpaper==1', {
+ 'grit_defines': ['-D', 'use_oem_wallpaper'],
+ }],
+ ['clang_use_chrome_plugins==1 and OS!="win"', {
+ 'clang_chrome_plugins_flags': [
+ '<!@(<(DEPTH)/tools/clang/scripts/plugin_flags.sh)'
+ ],
+ }],
+
+ ['enable_web_intents_tag==1', {
+ 'grit_defines': ['-D', 'enable_web_intents_tag'],
+ }],
+
+ ['asan==1', {
+ 'clang%': 1,
+ }],
+ ['asan==1 and OS=="mac"', {
+ # See http://crbug.com/145503.
+ 'component': "static_library",
+ }],
+ ['tsan==1', {
+ 'clang%': 1,
+ }],
+
+ ['OS=="linux" and clang_type_profiler==1', {
+ 'clang%': 1,
+ 'clang_use_chrome_plugins%': 0,
+ 'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_x64',
+ }],
+
+ # On valgrind bots, override the optimizer settings so we don't inline too
+ # much and make the stacks harder to figure out.
+ #
+ # TODO(rnk): Kill off variables that no one else uses and just implement
+ # them under a build_for_tool== condition.
+ ['build_for_tool=="memcheck" or build_for_tool=="tsan"', {
+ # gcc flags
+ 'mac_debug_optimization': '1',
+ 'mac_release_optimization': '1',
+ 'release_optimize': '1',
+ 'no_gc_sections': 1,
+ 'debug_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+ '-fno-builtin -fno-optimize-sibling-calls',
+ 'release_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+ '-fno-builtin -fno-optimize-sibling-calls',
+
+ # MSVS flags for TSan on Pin and Windows.
+ 'win_debug_RuntimeChecks': '0',
+ 'win_debug_disable_iterator_debugging': '1',
+ 'win_debug_Optimization': '1',
+ 'win_debug_InlineFunctionExpansion': '0',
+ 'win_release_InlineFunctionExpansion': '0',
+ 'win_release_OmitFramePointers': '0',
+
+ 'linux_use_tcmalloc': 1,
+ 'release_valgrind_build': 1,
+ 'werror': '',
+ 'component': 'static_library',
+ 'use_system_zlib': 0,
+ }],
+
+ # Build tweaks for DrMemory.
+ # TODO(rnk): Combine with tsan config to share the builder.
+ # http://crbug.com/108155
+ ['build_for_tool=="drmemory"', {
+ # These runtime checks force initialization of stack vars which blocks
+ # DrMemory's uninit detection.
+ 'win_debug_RuntimeChecks': '0',
+ # Iterator debugging is slow.
+ 'win_debug_disable_iterator_debugging': '1',
+ # Try to disable optimizations that mess up stacks in a release build.
+ 'win_release_InlineFunctionExpansion': '0',
+ 'win_release_OmitFramePointers': '0',
+ # Ditto for debug, to support bumping win_debug_Optimization.
+ 'win_debug_InlineFunctionExpansion': 0,
+ 'win_debug_OmitFramePointers': 0,
+ # Keep the code under #ifndef NVALGRIND.
+ 'release_valgrind_build': 1,
+ }],
+ ],
+
+ # List of default apps to install in new profiles. The first list contains
+ # the source files as found in svn. The second list, used only for linux,
+ # contains the destination location for each of the files. When a crx
+ # is added or removed from the list, the chrome/browser/resources/
+ # default_apps/external_extensions.json file must also be updated.
+ 'default_apps_list': [
+ 'browser/resources/default_apps/external_extensions.json',
+ 'browser/resources/default_apps/gmail.crx',
+ 'browser/resources/default_apps/search.crx',
+ 'browser/resources/default_apps/youtube.crx',
+ 'browser/resources/default_apps/drive.crx',
+ 'browser/resources/default_apps/docs.crx',
+ ],
+ 'default_apps_list_linux_dest': [
+ '<(PRODUCT_DIR)/default_apps/external_extensions.json',
+ '<(PRODUCT_DIR)/default_apps/gmail.crx',
+ '<(PRODUCT_DIR)/default_apps/search.crx',
+ '<(PRODUCT_DIR)/default_apps/youtube.crx',
+ '<(PRODUCT_DIR)/default_apps/drive.crx',
+ '<(PRODUCT_DIR)/default_apps/docs.crx',
+ ],
+ },
+ 'target_defaults': {
+ 'variables': {
+ # The condition that operates on chromium_code is in a target_conditions
+ # section, and will not have access to the default fallback value of
+ # chromium_code at the top of this file, or to the chromium_code
+ # variable placed at the root variables scope of .gyp files, because
+ # those variables are not set at target scope. As a workaround,
+ # if chromium_code is not set at target scope, define it in target scope
+ # to contain whatever value it has during early variable expansion.
+ # That's enough to make it available during target conditional
+ # processing.
+ 'chromium_code%': '<(chromium_code)',
+
+ # See http://msdn.microsoft.com/en-us/library/aa652360(VS.71).aspx
+ 'win_release_Optimization%': '2', # 2 = /Os
+ 'win_debug_Optimization%': '0', # 0 = /Od
+
+ # See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
+ # Tri-state: blank is default, 1 on, 0 off
+ 'win_release_OmitFramePointers%': '0',
+ # Tri-state: blank is default, 1 on, 0 off
+ 'win_debug_OmitFramePointers%': '',
+
+ # See http://msdn.microsoft.com/en-us/library/8wtf2dfz(VS.71).aspx
+ 'win_debug_RuntimeChecks%': '3', # 3 = all checks enabled, 0 = off
+
+ # See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
+ 'win_debug_InlineFunctionExpansion%': '', # empty = default, 0 = off,
+ 'win_release_InlineFunctionExpansion%': '2', # 1 = only __inline, 2 = max
+
+ # VS inserts quite a lot of extra checks to algorithms like
+ # std::partial_sort in Debug build which make them O(N^2)
+ # instead of O(N*logN). This is particularly slow under memory
+ # tools like ThreadSanitizer so we want it to be disablable.
+ # See http://msdn.microsoft.com/en-us/library/aa985982(v=VS.80).aspx
+ 'win_debug_disable_iterator_debugging%': '0',
+
+ 'release_extra_cflags%': '',
+ 'debug_extra_cflags%': '',
+
+ 'release_valgrind_build%': '<(release_valgrind_build)',
+
+ # the non-qualified versions are widely assumed to be *nix-only
+ 'win_release_extra_cflags%': '',
+ 'win_debug_extra_cflags%': '',
+
+ # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+ 'enable_wexit_time_destructors%': '<(enable_wexit_time_destructors)',
+
+ # Only used by Windows build for now. Can be used to build into a
+ # different output directory, e.g., a build_dir_prefix of VS2010_ would
+ # output files in src/build/VS2010_{Debug,Release}.
+ 'build_dir_prefix%': '',
+
+ # Targets are by default not nacl untrusted code.
+ 'nacl_untrusted_build%': 0,
+
+ 'conditions': [
+ ['OS=="win" and component=="shared_library"', {
+ # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+ 'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+ 'win_debug_RuntimeLibrary%': '3', # 3 = /MDd (debug DLL)
+ }, {
+ # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+ 'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+ 'win_debug_RuntimeLibrary%': '1', # 1 = /MTd (debug static)
+ }],
+ ['OS=="ios"', {
+ # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+ 'mac_release_optimization%': 's', # Use -Os unless overridden
+ 'mac_debug_optimization%': '0', # Use -O0 unless overridden
+ }, {
+ # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+ 'mac_release_optimization%': '3', # Use -O3 unless overridden
+ 'mac_debug_optimization%': '0', # Use -O0 unless overridden
+ }],
+ ],
+ },
+ 'conditions': [
+ ['OS=="linux" and linux_use_tcmalloc==1 and clang_type_profiler==1', {
+ 'cflags_cc!': ['-fno-rtti'],
+ 'cflags_cc+': [
+ '-frtti',
+ '-gline-tables-only',
+ '-fintercept-allocation-functions',
+ ],
+ 'defines': ['TYPE_PROFILING'],
+ 'dependencies': [
+ '<(DEPTH)/base/allocator/allocator.gyp:type_profiler',
+ ],
+ }],
+ ['OS=="win" and "<(msbuild_toolset)"!=""', {
+ 'msbuild_toolset': '<(msbuild_toolset)',
+ }],
+ ['branding=="Chrome"', {
+ 'defines': ['GOOGLE_CHROME_BUILD'],
+ }, { # else: branding!="Chrome"
+ 'defines': ['CHROMIUM_BUILD'],
+ }],
+ ['OS=="mac" and component=="shared_library"', {
+ 'xcode_settings': {
+ 'DYLIB_INSTALL_NAME_BASE': '@rpath',
+ 'LD_RUNPATH_SEARCH_PATHS': [
+ # For unbundled binaries.
+ '@loader_path/.',
+ # For bundled binaries, to get back from Binary.app/Contents/MacOS.
+ '@loader_path/../../..',
+ ],
+ },
+ }],
+ ['branding=="Chrome" and (OS=="win" or OS=="mac")', {
+ 'defines': ['ENABLE_RLZ'],
+ }],
+ ['component=="shared_library"', {
+ 'defines': ['COMPONENT_BUILD'],
+ }],
+ ['toolkit_views==1', {
+ 'defines': ['TOOLKIT_VIEWS=1'],
+ }],
+ ['ui_compositor_image_transport==1', {
+ 'defines': ['UI_COMPOSITOR_IMAGE_TRANSPORT'],
+ }],
+ ['use_aura==1', {
+ 'defines': ['USE_AURA=1'],
+ }],
+ ['use_ash==1', {
+ 'defines': ['USE_ASH=1'],
+ }],
+ ['use_libjpeg_turbo==1', {
+ 'defines': ['USE_LIBJPEG_TURBO=1'],
+ }],
+ ['use_nss==1', {
+ 'defines': ['USE_NSS=1'],
+ }],
+ ['enable_one_click_signin==1', {
+ 'defines': ['ENABLE_ONE_CLICK_SIGNIN'],
+ }],
+ ['toolkit_uses_gtk==1 and toolkit_views==0', {
+ # TODO(erg): We are progressively sealing up use of deprecated features
+ # in gtk in preparation for an eventual porting to gtk3.
+ 'defines': ['GTK_DISABLE_SINGLE_INCLUDES=1'],
+ }],
+ ['chromeos==1', {
+ 'defines': ['OS_CHROMEOS=1'],
+ }],
+ ['use_xi2_mt!=0', {
+ 'defines': ['USE_XI2_MT=<(use_xi2_mt)'],
+ }],
+ ['file_manager_extension==1', {
+ 'defines': ['FILE_MANAGER_EXTENSION=1'],
+ }],
+ ['profiling==1', {
+ 'defines': ['ENABLE_PROFILING=1'],
+ }],
+ ['OS=="linux" and glibcxx_debug==1', {
+ 'defines': ['_GLIBCXX_DEBUG=1',],
+ 'cflags_cc!': ['-fno-rtti'],
+ 'cflags_cc+': ['-frtti', '-g'],
+ }],
+ ['OS=="linux"', {
+ # we need lrint(), which is ISOC99, and Xcode
+ # already forces -std=c99 for mac below
+ 'defines': ['_ISOC99_SOURCE=1'],
+ }],
+ ['remoting==1', {
+ 'defines': ['ENABLE_REMOTING=1'],
+ }],
+ ['enable_webrtc==1', {
+ 'defines': ['ENABLE_WEBRTC=1'],
+ }],
+ ['proprietary_codecs==1', {
+ 'defines': ['USE_PROPRIETARY_CODECS'],
+ }],
+ ['enable_pepper_threading==1', {
+ 'defines': ['ENABLE_PEPPER_THREADING'],
+ }],
+ ['enable_viewport==1', {
+ 'defines': ['ENABLE_VIEWPORT'],
+ }],
+ ['configuration_policy==1', {
+ 'defines': ['ENABLE_CONFIGURATION_POLICY'],
+ }],
+ ['input_speech==1', {
+ 'defines': ['ENABLE_INPUT_SPEECH'],
+ }],
+ ['notifications==1', {
+ 'defines': ['ENABLE_NOTIFICATIONS'],
+ }],
+ ['enable_hidpi==1', {
+ 'defines': ['ENABLE_HIDPI=1'],
+ }],
+ ['fastbuild!=0', {
+
+ 'conditions': [
+ # For Windows and Mac, we don't generate debug information.
+ ['OS=="win" or OS=="mac"', {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'false',
+ },
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '0',
+ }
+ },
+ 'xcode_settings': {
+ 'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
+ },
+ }, { # else: OS != "win", generate less debug information.
+ 'variables': {
+ 'debug_extra_cflags': '-g1',
+ },
+ }],
+ # Clang creates chubby debug information, which makes linking very
+ # slow. For now, don't create debug information with clang. See
+ # http://crbug.com/70000
+ ['(OS=="linux" or OS=="android") and clang==1', {
+ 'variables': {
+ 'debug_extra_cflags': '-g0',
+ },
+ }],
+ ], # conditions for fastbuild.
+ }], # fastbuild!=0
+ ['dcheck_always_on!=0', {
+ 'defines': ['DCHECK_ALWAYS_ON=1'],
+ }], # dcheck_always_on!=0
+ ['selinux==1', {
+ 'defines': ['CHROMIUM_SELINUX=1'],
+ }],
+ ['win_use_allocator_shim==0', {
+ 'conditions': [
+ ['OS=="win"', {
+ 'defines': ['NO_TCMALLOC'],
+ }],
+ ],
+ }],
+ ['enable_gpu==1', {
+ 'defines': [
+ 'ENABLE_GPU=1',
+ ],
+ }],
+ ['use_openssl==1', {
+ 'defines': [
+ 'USE_OPENSSL=1',
+ ],
+ }],
+ ['enable_eglimage==1', {
+ 'defines': [
+ 'ENABLE_EGLIMAGE=1',
+ ],
+ }],
+ ['use_skia==1', {
+ 'defines': [
+ 'USE_SKIA=1',
+ ],
+ }],
+ ['coverage!=0', {
+ 'conditions': [
+ ['OS=="mac" or OS=="ios"', {
+ 'xcode_settings': {
+ 'GCC_INSTRUMENT_PROGRAM_FLOW_ARCS': 'YES', # -fprofile-arcs
+ 'GCC_GENERATE_TEST_COVERAGE_FILES': 'YES', # -ftest-coverage
+ },
+ }],
+ ['OS=="mac"', {
+ # Add -lgcov for types executable, shared_library, and
+ # loadable_module; not for static_library.
+ # This is a delayed conditional.
+ 'target_conditions': [
+ ['_type!="static_library"', {
+ 'xcode_settings': { 'OTHER_LDFLAGS': [ '-lgcov' ] },
+ }],
+ ],
+ }],
+ ['OS=="linux" or OS=="android"', {
+ 'cflags': [ '-ftest-coverage',
+ '-fprofile-arcs' ],
+ 'link_settings': { 'libraries': [ '-lgcov' ] },
+ }],
+ # Finally, for Windows, we simply turn on profiling.
+ ['OS=="win"', {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'Profile': 'true',
+ },
+ 'VCCLCompilerTool': {
+ # /Z7, not /Zi, so coverage is happy
+ 'DebugInformationFormat': '1',
+ 'AdditionalOptions': ['/Yd'],
+ }
+ }
+ }], # OS==win
+ ], # conditions for coverage
+ }], # coverage!=0
+ ['OS=="win"', {
+ 'defines': [
+ '__STD_C',
+ '_CRT_SECURE_NO_DEPRECATE',
+ '_SCL_SECURE_NO_DEPRECATE',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/third_party/wtl/include',
+ ],
+ 'conditions': [
+ ['win_z7!=0', {
+ 'msvs_settings': {
+ # Generates debug info when win_z7=1
+ # even if fastbuild=1 (that makes GenerateDebugInformation false).
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ },
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '1',
+ }
+ }
+ }],
+ ], # win_z7!=0
+ }], # OS==win
+ ['enable_task_manager==1', {
+ 'defines': [
+ 'ENABLE_TASK_MANAGER=1',
+ ],
+ }],
+ ['enable_web_intents==1', {
+ 'defines': [
+ 'ENABLE_WEB_INTENTS=1',
+ ],
+ }],
+ ['enable_extensions==1', {
+ 'defines': [
+ 'ENABLE_EXTENSIONS=1',
+ ],
+ }],
+ ['OS=="win" and branding=="Chrome"', {
+ 'defines': ['ENABLE_SWIFTSHADER'],
+ }],
+ ['enable_dart==1', {
+ 'defines': ['WEBKIT_USING_DART=1'],
+ }],
+ ['enable_plugin_installation==1', {
+ 'defines': ['ENABLE_PLUGIN_INSTALLATION=1'],
+ }],
+ ['enable_protector_service==1', {
+ 'defines': ['ENABLE_PROTECTOR_SERVICE=1'],
+ }],
+ ['enable_session_service==1', {
+ 'defines': ['ENABLE_SESSION_SERVICE=1'],
+ }],
+ ['enable_themes==1', {
+ 'defines': ['ENABLE_THEMES=1'],
+ }],
+ ['enable_background==1', {
+ 'defines': ['ENABLE_BACKGROUND=1'],
+ }],
+ ['enable_automation==1', {
+ 'defines': ['ENABLE_AUTOMATION=1'],
+ }],
+ ['enable_printing==1', {
+ 'defines': ['ENABLE_PRINTING=1'],
+ }],
+ ['enable_captive_portal_detection==1', {
+ 'defines': ['ENABLE_CAPTIVE_PORTAL_DETECTION=1'],
+ }],
+ ['disable_ftp_support==1', {
+ 'defines': ['DISABLE_FTP_SUPPORT=1'],
+ }],
+ ], # conditions for 'target_defaults'
+ 'target_conditions': [
+ ['enable_wexit_time_destructors==1', {
+ 'conditions': [
+ [ 'clang==1', {
+ 'cflags': [
+ '-Wexit-time-destructors',
+ ],
+ 'xcode_settings': {
+ 'WARNING_CFLAGS': [
+ '-Wexit-time-destructors',
+ ],
+ },
+ }],
+ ],
+ }],
+ ['chromium_code==0', {
+ 'conditions': [
+ [ 'os_posix==1 and OS!="mac" and OS!="ios"', {
+ # We don't want to get warnings from third-party code,
+ # so remove any existing warning-enabling flags like -Wall.
+ 'cflags!': [
+ '-Wall',
+ '-Wextra',
+ ],
+ 'cflags_cc': [
+ # Don't warn about hash_map in third-party code.
+ '-Wno-deprecated',
+ ],
+ 'cflags': [
+ # Don't warn about printf format problems.
+ # This is off by default in gcc but on in Ubuntu's gcc(!).
+ '-Wno-format',
+ ],
+ 'cflags_cc!': [
+ # TODO(fischman): remove this.
+ # http://code.google.com/p/chromium/issues/detail?id=90453
+ '-Wsign-compare',
+ ]
+ }],
+ # TODO: Fix all warnings on chromeos too.
+ [ 'os_posix==1 and OS!="mac" and OS!="ios" and (clang!=1 or chromeos==1)', {
+ 'cflags!': [
+ '-Werror',
+ ],
+ }],
+ [ 'os_posix==1 and os_bsd!=1 and OS!="mac" and OS!="android"', {
+ 'cflags': [
+ # Don't warn about ignoring the return value from e.g. close().
+ # This is off by default in some gccs but on by default in others.
+ # BSD systems do not support this option, since they are usually
+ # using gcc 4.2.1, which does not have this flag yet.
+ '-Wno-unused-result',
+ ],
+ }],
+ [ 'OS=="win"', {
+ 'defines': [
+ '_CRT_SECURE_NO_DEPRECATE',
+ '_CRT_NONSTDC_NO_WARNINGS',
+ '_CRT_NONSTDC_NO_DEPRECATE',
+ '_SCL_SECURE_NO_DEPRECATE',
+ ],
+ 'msvs_disabled_warnings': [4800],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'WarningLevel': '3',
+ 'WarnAsError': '<(win_third_party_warn_as_error)',
+ 'Detect64BitPortabilityProblems': 'false',
+ },
+ },
+ 'conditions': [
+ ['buildtype=="Official"', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': { 'WarnAsError': 'false' },
+ }
+ }],
+ ],
+ }],
+ # TODO(darin): Unfortunately, some third_party code depends on base/
+ [ 'OS=="win" and component=="shared_library"', {
+ 'msvs_disabled_warnings': [
+ 4251, # class 'std::xx' needs to have dll-interface.
+ ],
+ }],
+ [ 'OS=="mac" or OS=="ios"', {
+ 'xcode_settings': {
+ 'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
+ },
+ 'conditions': [
+ ['buildtype=="Official"', {
+ 'xcode_settings': {
+ 'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO', # -Werror
+ },
+ }],
+ ],
+ }],
+ [ 'OS=="ios"', {
+ 'xcode_settings': {
+ # TODO(ios): Fix remaining warnings in third-party code, then
+ # remove this; the Mac cleanup didn't get everything that's
+ # flagged in an iOS build.
+ 'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
+ 'RUN_CLANG_STATIC_ANALYZER': 'NO',
+ },
+ }],
+ ],
+ }, {
+ 'includes': [
+ # Rules for excluding e.g. foo_win.cc from the build on non-Windows.
+ 'filename_rules.gypi',
+ ],
+ # In Chromium code, we define __STDC_FORMAT_MACROS in order to get the
+ # C99 macros on Mac and Linux.
+ 'defines': [
+ '__STDC_FORMAT_MACROS',
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ # turn on warnings for signed/unsigned mismatch on chromium code.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': ['/we4389'],
+ },
+ },
+ }],
+ ['OS=="win" and component=="shared_library"', {
+ 'msvs_disabled_warnings': [
+ 4251, # class 'std::xx' needs to have dll-interface.
+ ],
+ }],
+ ],
+ }],
+ ], # target_conditions for 'target_defaults'
+ 'default_configuration': 'Debug',
+ 'configurations': {
+ # VCLinkerTool LinkIncremental values below:
+ # 0 == default
+ # 1 == /INCREMENTAL:NO
+ # 2 == /INCREMENTAL
+ # Debug links incremental, Release does not.
+ #
+ # Abstract base configurations to cover common attributes.
+ #
+ 'Common_Base': {
+ 'abstract': 1,
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)',
+ 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+ 'CharacterSet': '1',
+ },
+ },
+ 'x86_Base': {
+ 'abstract': 1,
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TargetMachine': '1',
+ },
+ },
+ 'msvs_configuration_platform': 'Win32',
+ },
+ 'x64_Base': {
+ 'abstract': 1,
+ 'msvs_configuration_platform': 'x64',
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'TargetMachine': '17', # x86 - 64
+ 'AdditionalLibraryDirectories!':
+ ['<(windows_sdk_path)/Lib/win8/um/x86'],
+ 'AdditionalLibraryDirectories':
+ ['<(windows_sdk_path)/Lib/win8/um/x64'],
+ },
+ 'VCLibrarianTool': {
+ 'AdditionalLibraryDirectories!':
+ ['<(windows_sdk_path)/Lib/win8/um/x86'],
+ 'AdditionalLibraryDirectories':
+ ['<(windows_sdk_path)/Lib/win8/um/x64'],
+ },
+ },
+ 'defines': [
+ # Not sure if tcmalloc works on 64-bit Windows.
+ 'NO_TCMALLOC',
+ ],
+ },
+ 'Debug_Base': {
+ 'abstract': 1,
+ 'defines': [
+ 'DYNAMIC_ANNOTATIONS_ENABLED=1',
+ 'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+ ],
+ 'xcode_settings': {
+ 'COPY_PHASE_STRIP': 'NO',
+ 'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
+ 'OTHER_CFLAGS': [
+ '<@(debug_extra_cflags)',
+ ],
+ },
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '<(win_debug_Optimization)',
+ 'PreprocessorDefinitions': ['_DEBUG'],
+ 'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)',
+ 'RuntimeLibrary': '<(win_debug_RuntimeLibrary)',
+ 'conditions': [
+ # According to MSVS, InlineFunctionExpansion=0 means
+ # "default inlining", not "/Ob0".
+ # Thus, we have to handle InlineFunctionExpansion==0 separately.
+ ['win_debug_InlineFunctionExpansion==0', {
+ 'AdditionalOptions': ['/Ob0'],
+ }],
+ ['win_debug_InlineFunctionExpansion!=""', {
+ 'InlineFunctionExpansion':
+ '<(win_debug_InlineFunctionExpansion)',
+ }],
+ ['win_debug_disable_iterator_debugging==1', {
+ 'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'],
+ }],
+
+ # if win_debug_OmitFramePointers is blank, leave as default
+ ['win_debug_OmitFramePointers==1', {
+ 'OmitFramePointers': 'true',
+ }],
+ ['win_debug_OmitFramePointers==0', {
+ 'OmitFramePointers': 'false',
+ # The above is not sufficient (http://crbug.com/106711): it
+ # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+ # perform FPO regardless, so we must explicitly disable.
+ # We still want the false setting above to avoid having
+ # "/Oy /Oy-" and warnings about overriding.
+ 'AdditionalOptions': ['/Oy-'],
+ }],
+ ],
+ 'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+ },
+ 'VCLinkerTool': {
+ 'LinkIncremental': '<(msvs_debug_link_incremental)',
+ # ASLR makes debugging with windbg difficult because Chrome.exe and
+ # Chrome.dll share the same base name. As result, windbg will
+ # name the Chrome.dll module like chrome_<base address>, where
+ # <base address> typically changes with each launch. This in turn
+ # means that breakpoints in Chrome.dll don't stick from one launch
+ # to the next. For this reason, we turn ASLR off in debug builds.
+ # Note that this is a three-way bool, where 0 means to pick up
+ # the default setting, 1 is off and 2 is on.
+ 'RandomizedBaseAddress': 1,
+ },
+ 'VCResourceCompilerTool': {
+ 'PreprocessorDefinitions': ['_DEBUG'],
+ },
+ },
+ 'conditions': [
+ ['OS=="linux" or OS=="android"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '<@(debug_extra_cflags)',
+ ],
+ }],
+ ],
+ }],
+ # Disabled on iOS because it was causing a crash on startup.
+ # TODO(michelea): investigate, create a reduced test and possibly
+ # submit a radar.
+ ['release_valgrind_build==0 and OS!="ios"', {
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fstack-protector-all', # Implies -fstack-protector
+ ],
+ },
+ }],
+ ],
+ },
+ 'Release_Base': {
+ 'abstract': 1,
+ 'defines': [
+ 'NDEBUG',
+ ],
+ 'xcode_settings': {
+ 'DEAD_CODE_STRIPPING': 'YES', # -Wl,-dead_strip
+ 'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+ 'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+ },
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+ 'conditions': [
+ # In official builds, each target will self-select
+ # an optimization level.
+ ['buildtype!="Official"', {
+ 'Optimization': '<(win_release_Optimization)',
+ },
+ ],
+ # According to MSVS, InlineFunctionExpansion=0 means
+ # "default inlining", not "/Ob0".
+ # Thus, we have to handle InlineFunctionExpansion==0 separately.
+ ['win_release_InlineFunctionExpansion==0', {
+ 'AdditionalOptions': ['/Ob0'],
+ }],
+ ['win_release_InlineFunctionExpansion!=""', {
+ 'InlineFunctionExpansion':
+ '<(win_release_InlineFunctionExpansion)',
+ }],
+
+ # if win_release_OmitFramePointers is blank, leave as default
+ ['win_release_OmitFramePointers==1', {
+ 'OmitFramePointers': 'true',
+ }],
+ ['win_release_OmitFramePointers==0', {
+ 'OmitFramePointers': 'false',
+ # The above is not sufficient (http://crbug.com/106711): it
+ # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+ # perform FPO regardless, so we must explicitly disable.
+ # We still want the false setting above to avoid having
+ # "/Oy /Oy-" and warnings about overriding.
+ 'AdditionalOptions': ['/Oy-'],
+ }],
+ ],
+ 'AdditionalOptions': [ '<@(win_release_extra_cflags)', ],
+ },
+ 'VCLinkerTool': {
+ # LinkIncremental is a tri-state boolean, where 0 means default
+ # (i.e., inherit from parent solution), 1 means false, and
+ # 2 means true.
+ 'LinkIncremental': '1',
+ # This corresponds to the /PROFILE flag which ensures the PDB
+ # file contains FIXUP information (growing the PDB file by about
+ # 5%) but does not otherwise alter the output binary. This
+ # information is used by the Syzygy optimization tool when
+ # decomposing the release image.
+ 'Profile': 'true',
+ },
+ },
+ 'conditions': [
+ ['msvs_use_common_release', {
+ 'includes': ['release.gypi'],
+ }],
+ ['release_valgrind_build==0', {
+ 'defines': [
+ 'NVALGRIND',
+ 'DYNAMIC_ANNOTATIONS_ENABLED=0',
+ ],
+ }, {
+ 'defines': [
+ 'DYNAMIC_ANNOTATIONS_ENABLED=1',
+ 'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+ ],
+ }],
+ ['win_use_allocator_shim==0', {
+ 'defines': ['NO_TCMALLOC'],
+ }],
+ ['OS=="linux"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '<@(release_extra_cflags)',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ #
+ # Concrete configurations
+ #
+ 'Debug': {
+ 'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'],
+ },
+ 'Release': {
+ 'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+ },
+ 'conditions': [
+ [ 'OS=="win"', {
+ # TODO(bradnelson): add a gyp mechanism to make this more graceful.
+ 'Debug_x64': {
+ 'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'],
+ },
+ 'Release_x64': {
+ 'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'],
+ },
+ }],
+ ],
+ },
+ },
+ 'conditions': [
+ ['os_posix==1 and OS!="mac" and OS!="ios"', {
+ 'target_defaults': {
+ # Enable -Werror by default, but put it in a variable so it can
+ # be disabled in ~/.gyp/include.gypi on the valgrind builders.
+ 'variables': {
+ 'werror%': '-Werror',
+ 'libraries_for_target%': '',
+ },
+ 'defines': [
+ '_FILE_OFFSET_BITS=64',
+ ],
+ 'cflags': [
+ '<(werror)', # See note above about the werror variable.
+ '-pthread',
+ '-fno-exceptions',
+ '-fno-strict-aliasing', # See http://crbug.com/32204
+ '-Wall',
+ # TODO(evan): turn this back on once all the builds work.
+ # '-Wextra',
+ # Don't warn about unused function params. We use those everywhere.
+ '-Wno-unused-parameter',
+ # Don't warn about the "struct foo f = {0};" initialization pattern.
+ '-Wno-missing-field-initializers',
+ # Don't export any symbols (for example, to plugins we dlopen()).
+ # Note: this is *required* to make some plugins work.
+ '-fvisibility=hidden',
+ '-pipe',
+ ],
+ 'cflags_cc': [
+ '-fno-rtti',
+ '-fno-threadsafe-statics',
+ # Make inline functions have hidden visibility by default.
+ # Surprisingly, not covered by -fvisibility=hidden.
+ '-fvisibility-inlines-hidden',
+ # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+ # so we specify it explicitly.
+ # TODO(fischman): remove this if http://llvm.org/PR10448 obsoletes it.
+ # http://code.google.com/p/chromium/issues/detail?id=90453
+ '-Wsign-compare',
+ ],
+ 'ldflags': [
+ '-pthread', '-Wl,-z,noexecstack',
+ ],
+ 'libraries' : [
+ '<(libraries_for_target)',
+ ],
+ 'configurations': {
+ 'Debug_Base': {
+ 'variables': {
+ 'debug_optimize%': '0',
+ },
+ 'defines': [
+ '_DEBUG',
+ ],
+ 'cflags': [
+ '-O>(debug_optimize)',
+ '-g',
+ ],
+ 'conditions' : [
+ ['OS=="android" and android_full_debug==0', {
+ # Some configurations are copied from Release_Base to reduce
+ # the binary size.
+ 'variables': {
+ 'debug_optimize%': 's',
+ },
+ 'cflags': [
+ '-fomit-frame-pointer',
+ '-fdata-sections',
+ '-ffunction-sections',
+ ],
+ 'ldflags': [
+ '-Wl,-O1',
+ '-Wl,--as-needed',
+ '-Wl,--gc-sections',
+ ],
+ }],
+ ],
+ },
+ 'Release_Base': {
+ 'variables': {
+ 'release_optimize%': '2',
+ # Binaries become big and gold is unable to perform GC
+ # and remove unused sections for some of test targets
+ # on 32 bit platform.
+ # (This is currently observed only in chromeos valgrind bots)
+ # The following flag is to disable --gc-sections linker
+ # option for these bots.
+ 'no_gc_sections%': 0,
+
+ # TODO(bradnelson): reexamine how this is done if we change the
+ # expansion of configurations
+ 'release_valgrind_build%': 0,
+ },
+ 'cflags': [
+ '-O<(release_optimize)',
+ # Don't emit the GCC version ident directives, they just end up
+ # in the .comment section taking up binary size.
+ '-fno-ident',
+ # Put data and code in their own sections, so that unused symbols
+ # can be removed at link time with --gc-sections.
+ '-fdata-sections',
+ '-ffunction-sections',
+ ],
+ 'ldflags': [
+ # Specifically tell the linker to perform optimizations.
+ # See http://lwn.net/Articles/192624/ .
+ '-Wl,-O1',
+ '-Wl,--as-needed',
+ ],
+ 'conditions' : [
+ ['no_gc_sections==0', {
+ 'ldflags': [
+ '-Wl,--gc-sections',
+ ],
+ }],
+ ['OS=="android"', {
+ 'variables': {
+ 'release_optimize%': 's',
+ },
+ 'cflags': [
+ '-fomit-frame-pointer',
+ ],
+ }],
+ ['clang==1', {
+ 'cflags!': [
+ '-fno-ident',
+ ],
+ }],
+ ['profiling==1', {
+ 'cflags': [
+ '-fno-omit-frame-pointer',
+ '-g',
+ ],
+ }],
+ ],
+ },
+ },
+ 'variants': {
+ 'coverage': {
+ 'cflags': ['-fprofile-arcs', '-ftest-coverage'],
+ 'ldflags': ['-fprofile-arcs'],
+ },
+ 'profile': {
+ 'cflags': ['-pg', '-g'],
+ 'ldflags': ['-pg'],
+ },
+ 'symbols': {
+ 'cflags': ['-g'],
+ },
+ },
+ 'conditions': [
+ ['target_arch=="ia32"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'asflags': [
+ # Needed so that libs with .s files (e.g. libicudata.a)
+ # are compatible with the general 32-bit-ness.
+ '-32',
+ ],
+ # All floating-point computations on x87 happens in 80-bit
+ # precision. Because the C and C++ language standards allow
+ # the compiler to keep the floating-point values in higher
+ # precision than what's specified in the source and doing so
+ # is more efficient than constantly rounding up to 64-bit or
+ # 32-bit precision as specified in the source, the compiler,
+ # especially in the optimized mode, tries very hard to keep
+ # values in x87 floating-point stack (in 80-bit precision)
+ # as long as possible. This has important side effects, that
+ # the real value used in computation may change depending on
+ # how the compiler did the optimization - that is, the value
+ # kept in 80-bit is different than the value rounded down to
+ # 64-bit or 32-bit. There are possible compiler options to
+ # make this behavior consistent (e.g. -ffloat-store would keep
+ # all floating-values in the memory, thus force them to be
+ # rounded to its original precision) but they have significant
+ # runtime performance penalty.
+ #
+ # -mfpmath=sse -msse2 makes the compiler use SSE instructions
+ # which keep floating-point values in SSE registers in its
+ # native precision (32-bit for single precision, and 64-bit
+ # for double precision values). This means the floating-point
+ # value used during computation does not change depending on
+ # how the compiler optimized the code, since the value is
+ # always kept in its specified precision.
+ 'conditions': [
+ ['branding=="Chromium" and disable_sse2==0', {
+ 'cflags': [
+ '-march=pentium4',
+ '-msse2',
+ '-mfpmath=sse',
+ ],
+ }],
+ # ChromeOS targets Pinetrail, which is sse3, but most of the
+ # benefit comes from sse2 so this setting allows ChromeOS
+ # to build on other CPUs. In the future -march=atom would
+ # help but requires a newer compiler.
+ ['chromeos==1 and disable_sse2==0', {
+ 'cflags': [
+ '-msse2',
+ ],
+ }],
+ # Install packages have started cropping up with
+ # different headers between the 32-bit and 64-bit
+ # versions, so we have to shadow those differences off
+ # and make sure a 32-bit-on-64-bit build picks up the
+ # right files.
+ # For android build, use NDK headers instead of host headers
+ ['host_arch!="ia32" and OS!="android"', {
+ 'include_dirs+': [
+ '/usr/include32',
+ ],
+ }],
+ ],
+ 'target_conditions': [
+ ['_toolset=="target" and OS!="android"', {
+ # -mmmx allows mmintrin.h to be used for mmx intrinsics.
+ # video playback is mmx and sse2 optimized.
+ 'cflags': [
+ '-m32',
+ '-mmmx',
+ ],
+ 'ldflags': [
+ '-m32',
+ ],
+ 'cflags_mozilla': [
+ '-m32',
+ '-mmmx',
+ ],
+ }],
+ ],
+ }],
+ ],
+ }],
+ ['target_arch=="arm"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags_cc': [
+ # The codesourcery arm-2009q3 toolchain warns at that the ABI
+ # has changed whenever it encounters a varargs function. This
+ # silences those warnings, as they are not helpful and
+ # clutter legitimate warnings.
+ '-Wno-abi',
+ ],
+ 'conditions': [
+ ['arm_thumb==1', {
+ 'cflags': [
+ '-mthumb',
+ ]
+ }],
+ ['armv7==1', {
+ 'cflags': [
+ '-march=armv7-a',
+ '-mtune=cortex-a8',
+ '-mfloat-abi=<(arm_float_abi)',
+ ],
+ 'conditions': [
+ ['arm_neon==1', {
+ 'cflags': [ '-mfpu=neon', ],
+ }, {
+ 'cflags': [ '-mfpu=<(arm_fpu)', ],
+ }],
+ ],
+ }],
+ ['OS=="android"', {
+ # Most of the following flags are derived from what Android
+ # uses by default when building for arm, reference for which
+ # can be found in the following file in the Android NDK:
+ # toolchains/arm-linux-androideabi-4.4.3/setup.mk
+ 'cflags': [
+ # The tree-sra optimization (scalar replacement for
+ # aggregates enabling subsequent optimizations) leads to
+ # invalid code generation when using the Android NDK's
+ # compiler (r5-r7). This can be verified using
+ # TestWebKitAPI's WTF.Checked_int8_t test.
+ '-fno-tree-sra',
+ '-fuse-ld=gold',
+ '-Wno-psabi',
+ ],
+ # Android now supports .relro sections properly.
+ # NOTE: While these flags enable the generation of .relro
+ # sections, the generated libraries can still be loaded on
+ # older Android platform versions.
+ 'ldflags': [
+ '-Wl,-z,relro',
+ '-Wl,-z,now',
+ '-fuse-ld=gold',
+ ],
+ 'conditions': [
+ ['arm_thumb == 1', {
+ # Android toolchain doesn't support -mimplicit-it=thumb
+ 'cflags!': [ '-Wa,-mimplicit-it=thumb', ],
+ 'cflags': [ '-mthumb-interwork', ],
+ }],
+ ['armv7==0', {
+ # Flags suitable for Android emulator
+ 'cflags': [
+ '-march=armv5te',
+ '-mtune=xscale',
+ '-msoft-float',
+ ],
+ 'defines': [
+ '__ARM_ARCH_5__',
+ '__ARM_ARCH_5T__',
+ '__ARM_ARCH_5E__',
+ '__ARM_ARCH_5TE__',
+ ],
+ }],
+ ['clang==1', {
+ 'cflags!': [
+ # Clang does not support the following options.
+ '-mthumb-interwork',
+ '-finline-limit=64',
+ '-fno-tree-sra',
+ '-fuse-ld=gold',
+ '-Wno-psabi',
+ ],
+ }],
+ ],
+ }],
+ ],
+ }],
+ ],
+ }],
+ ['linux_fpic==1', {
+ 'cflags': [
+ '-fPIC',
+ ],
+ 'ldflags': [
+ '-fPIC',
+ ],
+ }],
+ ['sysroot!=""', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '--sysroot=<(sysroot)',
+ ],
+ 'ldflags': [
+ '--sysroot=<(sysroot)',
+ ],
+ }]]
+ }],
+ ['clang==1', {
+ 'cflags': [
+ '-Wheader-hygiene',
+ # Clang spots more unused functions.
+ '-Wno-unused-function',
+ # Don't die on dtoa code that uses a char as an array index.
+ '-Wno-char-subscripts',
+ # Especially needed for gtest macros using enum values from Mac
+ # system headers.
+ # TODO(pkasting): In C++11 this is legal, so this should be
+ # removed when we change to that. (This is also why we don't
+ # bother fixing all these cases today.)
+ '-Wno-unnamed-type-template-args',
+ # This (rightfully) complains about 'override', which we use
+ # heavily.
+ '-Wno-c++11-extensions',
+
+ # Warns on switches on enums that cover all enum values but
+ # also contain a default: branch. Chrome is full of that.
+ '-Wno-covered-switch-default',
+
+ # TODO(thakis): Remove this.
+ '-Wno-implicit-conversion-floating-point-to-bool',
+ ],
+ 'cflags!': [
+ # Clang doesn't seem to know this flag.
+ '-mfpmath=sse',
+ ],
+ }],
+ ['clang==1 and clang_use_chrome_plugins==1', {
+ 'cflags': [
+ '<@(clang_chrome_plugins_flags)',
+ ],
+ }],
+ ['clang==1 and clang_load!=""', {
+ 'cflags': [
+ '-Xclang', '-load', '-Xclang', '<(clang_load)',
+ ],
+ }],
+ ['clang==1 and clang_add_plugin!=""', {
+ 'cflags': [
+ '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+ ],
+ }],
+ ['clang==1 and "<(GENERATOR)"=="ninja"', {
+ 'cflags': [
+ # See http://crbug.com/110262
+ '-fcolor-diagnostics',
+ ],
+ }],
+ ['asan==1', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '-faddress-sanitizer',
+ '-fno-omit-frame-pointer',
+ ],
+ 'ldflags': [
+ '-faddress-sanitizer',
+ ],
+ 'defines': [
+ 'ADDRESS_SANITIZER',
+ ],
+ }],
+ ],
+ }],
+ ['tsan==1', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '-fthread-sanitizer',
+ '-fno-omit-frame-pointer',
+ '-fPIE',
+ ],
+ 'ldflags': [
+ '-fthread-sanitizer',
+ ],
+ 'defines': [
+ 'THREAD_SANITIZER',
+ 'DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1',
+ ],
+ 'target_conditions': [
+ ['_type=="executable"', {
+ 'ldflags': [
+ '-pie',
+ ],
+ }],
+ ],
+ }],
+ ],
+ }],
+ ['order_profiling!=0 and (chromeos==1 or OS=="linux")', {
+ 'target_conditions' : [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '-finstrument-functions',
+ # Allow mmx intrinsics to inline, so that the
+ # compiler can expand the intrinsics.
+ '-finstrument-functions-exclude-file-list=mmintrin.h',
+ ],
+ }],
+ ],
+ }],
+ ['linux_breakpad==1', {
+ 'cflags': [ '-g' ],
+ 'defines': ['USE_LINUX_BREAKPAD'],
+ }],
+ ['linux_use_heapchecker==1', {
+ 'variables': {'linux_use_tcmalloc%': 1},
+ 'defines': ['USE_HEAPCHECKER'],
+ }],
+ ['linux_use_tcmalloc==0', {
+ 'defines': ['NO_TCMALLOC'],
+ }],
+ ['linux_keep_shadow_stacks==1', {
+ 'defines': ['KEEP_SHADOW_STACKS'],
+ 'cflags': [
+ '-finstrument-functions',
+ # Allow mmx intrinsics to inline, so that the compiler can expand
+ # the intrinsics.
+ '-finstrument-functions-exclude-file-list=mmintrin.h',
+ ],
+ }],
+ ['linux_use_gold_flags==1', {
+ 'ldflags': [
+ # Experimentation found that using four linking threads
+ # saved ~20% of link time.
+ # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+ '-Wl,--threads',
+ '-Wl,--thread-count=4',
+ ],
+ 'conditions': [
+ ['release_valgrind_build==0', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'ldflags': [
+ # There seems to be a conflict of --icf and -pie
+ # in gold which can generate crashy binaries. As
+ # a security measure, -pie takes precedence for
+ # now.
+ #'-Wl,--icf=safe',
+ '-Wl,--icf=none',
+ ],
+ }],
+ ],
+ }],
+ ],
+ }],
+ ['linux_use_gold_binary==1', {
+ 'variables': {
+ 'conditions': [
+ ['inside_chromium_build==1', {
+ # We pass the path to gold to the compiler. gyp leaves
+ # unspecified what the cwd is when running the compiler,
+ # so the normal gyp path-munging fails us. This hack
+ # gets the right path.
+ 'gold_path': '<(PRODUCT_DIR)/../../third_party/gold',
+ }, {
+ 'gold_path': '<(PRODUCT_DIR)/../../Source/WebKit/chromium/third_party/gold',
+ }]
+ ]
+ },
+ 'ldflags': [
+ # Put our gold binary in the search path for the linker.
+ '-B<(gold_path)',
+ ],
+ }],
+ ],
+ },
+ }],
+ # FreeBSD-specific options; note that most FreeBSD options are set above,
+ # with Linux.
+ ['OS=="freebsd"', {
+ 'target_defaults': {
+ 'ldflags': [
+ '-Wl,--no-keep-memory',
+ ],
+ },
+ }],
+ # Android-specific options; note that most are set above with Linux.
+ ['OS=="android"', {
+ 'variables': {
+ # This is the id for the archived chrome symbols. Each build that
+ # archives symbols is assigned an id which is then added to GYP_DEFINES.
+ # This is written to the device log on crashes just prior to dropping a
+ # tombstone. Tools can determine the location of the archived symbols
+ # from the id.
+ 'chrome_symbols_id%': '',
+ 'conditions': [
+ # Use shared stlport library when system one used.
+ # Figure this out early since it needs symbols from libgcc.a, so it
+ # has to be before that in the set of libraries.
+ ['use_system_stlport==1', {
+ 'android_stlport_library': 'stlport',
+ }, {
+ 'android_stlport_library': 'stlport_static',
+ }],
+ ],
+
+ # Placing this variable here prevents forking libvpx, which is used
+ # by remoting. Remoting is off, so it needn't be built,
+ # so forking its deps seems like overkill.
+ # But this variable needs to be defined to properly run gyp.
+ # A proper solution is to have an OS==android conditional
+ # in third_party/libvpx/libvpx.gyp to define it.
+ 'libvpx_path': 'lib/linux/arm',
+ },
+ 'target_defaults': {
+ 'variables': {
+ 'release_extra_cflags%': '',
+ },
+
+ 'target_conditions': [
+ # Settings for building device targets using Android's toolchain.
+ # These are based on the setup.mk file from the Android NDK.
+ #
+ # The NDK Android executable link step looks as follows:
+ # $LDFLAGS
+ # $(TARGET_CRTBEGIN_DYNAMIC_O) <-- crtbegin.o
+ # $(PRIVATE_OBJECTS) <-- The .o that we built
+ # $(PRIVATE_STATIC_LIBRARIES) <-- The .a that we built
+ # $(TARGET_LIBGCC) <-- libgcc.a
+ # $(PRIVATE_SHARED_LIBRARIES) <-- The .so that we built
+ # $(PRIVATE_LDLIBS) <-- System .so
+ # $(TARGET_CRTEND_O) <-- crtend.o
+ #
+ # For now the above are approximated for executables by adding
+ # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+ # of 'libraries'.
+ #
+ # The NDK Android shared library link step looks as follows:
+ # $LDFLAGS
+ # $(PRIVATE_OBJECTS) <-- The .o that we built
+ # -l,--whole-archive
+ # $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+ # -l,--no-whole-archive
+ # $(PRIVATE_STATIC_LIBRARIES) <-- The .a that we built
+ # $(TARGET_LIBGCC) <-- libgcc.a
+ # $(PRIVATE_SHARED_LIBRARIES) <-- The .so that we built
+ # $(PRIVATE_LDLIBS) <-- System .so
+ #
+ # For now, assume that whole static libraries are not needed.
+ #
+ # For both executables and shared libraries, add the proper
+ # libgcc.a to the start of libraries which puts it in the
+ # proper spot after .o and .a files get linked in.
+ #
+ # TODO: The proper thing to do longer-term would be proper gyp
+ # support for a custom link command line.
+ ['_toolset=="target"', {
+ 'conditions': [
+ ['build_with_mozilla==0', {
+ 'cflags!': [
+ '-pthread', # Not supported by Android toolchain.
+ ],
+ 'cflags': [
+ '-ffunction-sections',
+ '-funwind-tables',
+ '-g',
+ '-fstack-protector',
+ '-fno-short-enums',
+ '-finline-limit=64',
+ '-Wa,--noexecstack',
+ '<@(release_extra_cflags)',
+ ],
+ 'ldflags!': [
+ '-pthread', # Not supported by Android toolchain.
+ ],
+ 'ldflags': [
+ '-nostdlib',
+ '-Wl,--no-undefined',
+ # Don't export symbols from statically linked libraries.
+ '-Wl,--exclude-libs=ALL',
+ ],
+ 'libraries': [
+ '-l<(android_stlport_library)',
+ # Manually link the libgcc.a that the cross compiler uses.
+ '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
+ '-lc',
+ '-ldl',
+ '-lstdc++',
+ '-lm',
+ ],
+ 'conditions': [
+ ['android_upstream_bringup==1', {
+ 'defines': ['ANDROID_UPSTREAM_BRINGUP=1',],
+ }],
+ ['clang==1', {
+ 'cflags': [
+ # Work around incompatibilities between bionic and clang
+ # headers.
+ '-D__compiler_offsetof=__builtin_offsetof',
+ '-Dnan=__builtin_nan',
+ ],
+ 'conditions': [
+ ['target_arch=="arm"', {
+ 'cflags': [
+ '-target arm-linux-androideabi',
+ '-mllvm -arm-enable-ehabi',
+ ],
+ 'ldflags': [
+ '-target arm-linux-androideabi',
+ ],
+ }],
+ ['target_arch=="ia32"', {
+ 'cflags': [
+ '-target x86-linux-androideabi',
+ ],
+ 'ldflags': [
+ '-target x86-linux-androideabi',
+ ],
+ }],
+ ],
+ }],
+ ['android_build_type==0', {
+ 'defines': [
+ # The NDK has these things, but doesn't define the constants
+ # to say that it does. Define them here instead.
+ 'HAVE_SYS_UIO_H',
+ ],
+ 'cflags': [
+ '--sysroot=<(android_ndk_sysroot)',
+ ],
+ 'ldflags': [
+ '--sysroot=<(android_ndk_sysroot)',
+ ],
+ }],
+ ['android_build_type==1', {
+ 'include_dirs': [
+ # OpenAL headers from the Android tree.
+ '<(android_src)/frameworks/wilhelm/include',
+ ],
+ 'cflags': [
+ # Chromium builds its own (non-third-party) code with
+ # -Werror to make all warnings into errors. However, Android
+ # enables warnings that Chromium doesn't, so some of these
+ # extra warnings trip and break things.
+ # For now, we leave these warnings enabled but prevent them
+ # from being treated as errors.
+ #
+ # Things that are part of -Wextra:
+ '-Wno-error=extra', # Enabled by -Wextra, but no specific flag
+ '-Wno-error=ignored-qualifiers',
+ '-Wno-error=type-limits',
+ # Other things unrelated to -Wextra:
+ '-Wno-error=non-virtual-dtor',
+ '-Wno-error=sign-promo',
+ ],
+ 'cflags_cc': [
+ # Disabling c++0x-compat should be handled in WebKit, but
+ # this currently doesn't work because gcc_version is not set
+ # correctly when building with the Android build system.
+ # TODO(torne): Fix this in WebKit.
+ '-Wno-error=c++0x-compat',
+ ],
+ }],
+ ['android_build_type==1 and chromium_code==0', {
+ 'cflags': [
+ # There is a class of warning which:
+ # 1) Android always enables and also treats as errors
+ # 2) Chromium ignores in third party code
+ # For now, I am leaving these warnings enabled but preventing
+ # them from being treated as errors here.
+ '-Wno-error=address',
+ '-Wno-error=format-security',
+ '-Wno-error=non-virtual-dtor',
+ '-Wno-error=return-type',
+ '-Wno-error=sequence-point',
+ ],
+ }],
+ ['target_arch == "arm"', {
+ 'ldflags': [
+ # Enable identical code folding to reduce size.
+ '-Wl,--icf=safe',
+ ],
+ }],
+ # NOTE: The stlport header include paths below are specified in
+ # cflags rather than include_dirs because they need to come
+ # after include_dirs. Think of them like system headers, but
+ # don't use '-isystem' because the arm-linux-androideabi-4.4.3
+ # toolchain (circa Gingerbread) will exhibit strange errors.
+ # The include ordering here is important; change with caution.
+ ['use_system_stlport==1', {
+ 'cflags': [
+ # For libstdc++/include, which is used by stlport.
+ '-I<(android_src)/bionic',
+ '-I<(android_src)/external/stlport/stlport',
+ ],
+ }, { # else: use_system_stlport!=1
+ 'cflags': [
+ '-I<(android_ndk_root)/sources/cxx-stl/stlport/stlport',
+ ],
+ 'conditions': [
+ ['target_arch=="arm" and armv7==1', {
+ 'ldflags': [
+ '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi-v7a',
+ ],
+ }],
+ ['target_arch=="arm" and armv7==0', {
+ 'ldflags': [
+ '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi',
+ ],
+ }],
+ ['target_arch=="ia32"', {
+ 'ldflags': [
+ '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/x86',
+ ],
+ }],
+ ],
+ }],
+ ['target_arch=="ia32"', {
+ # The x86 toolchain currently has problems with stack-protector.
+ 'cflags!': [
+ '-fstack-protector',
+ ],
+ 'cflags': [
+ '-fno-stack-protector',
+ ],
+ }],
+ ],
+ 'target_conditions': [
+ ['_type=="executable"', {
+ 'ldflags': [
+ '-Bdynamic',
+ '-Wl,-dynamic-linker,/system/bin/linker',
+ '-Wl,--gc-sections',
+ '-Wl,-z,nocopyreloc',
+ # crtbegin_dynamic.o should be the last item in ldflags.
+ '<(android_ndk_lib)/crtbegin_dynamic.o',
+ ],
+ 'libraries': [
+ # crtend_android.o needs to be the last item in libraries.
+ # Do not add any libraries after this!
+ '<(android_ndk_lib)/crtend_android.o',
+ ],
+ }],
+ ['_type=="shared_library" or _type=="loadable_module"', {
+ 'ldflags': [
+ '-Wl,-shared,-Bsymbolic',
+ # crtbegin_so.o should be the last item in ldflags.
+ '<(android_ndk_lib)/crtbegin_so.o',
+ ],
+ 'libraries': [
+ # crtend_so.o needs to be the last item in libraries.
+ # Do not add any libraries after this!
+ '<(android_ndk_lib)/crtend_so.o',
+ ],
+ }],
+ ],
+
+ }], # build_with_mozilla==0
+
+ ],
+ 'defines': [
+ 'ANDROID',
+ '__GNU_SOURCE=1', # Necessary for clone()
+ 'USE_STLPORT=1',
+ '_STLP_USE_PTR_SPECIALIZATIONS=1',
+ 'CHROME_SYMBOLS_ID="<(chrome_symbols_id)"',
+ ],
+ }],
+ # Settings for building host targets using the system toolchain.
+ ['_toolset=="host"', {
+ 'cflags!': [
+ # Due to issues in Clang build system, using ASan on 32-bit
+ # binaries on x86_64 host is problematic.
+ # TODO(eugenis): re-enable.
+ '-faddress-sanitizer',
+ ],
+ 'ldflags!': [
+ '-faddress-sanitizer',
+ '-Wl,-z,noexecstack',
+ '-Wl,--gc-sections',
+ '-Wl,-O1',
+ '-Wl,--as-needed',
+ ],
+ 'sources/': [
+ ['exclude', '_android(_unittest)?\\.cc$'],
+ ['exclude', '(^|/)android/']
+ ],
+ }],
+ ],
+ },
+ }],
+ ['OS=="solaris"', {
+ 'cflags!': ['-fvisibility=hidden'],
+ 'cflags_cc!': ['-fvisibility-inlines-hidden'],
+ }],
+ ['OS=="mac" or OS=="ios"', {
+ 'target_defaults': {
+ 'mac_bundle': 0,
+ 'xcode_settings': {
+ 'ALWAYS_SEARCH_USER_PATHS': 'NO',
+ 'GCC_C_LANGUAGE_STANDARD': 'c99', # -std=c99
+ 'GCC_CW_ASM_SYNTAX': 'NO', # No -fasm-blocks
+ 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', # -fno-exceptions
+ 'GCC_ENABLE_CPP_RTTI': 'NO', # -fno-rtti
+ 'GCC_ENABLE_PASCAL_STRINGS': 'NO', # No -mpascal-strings
+ # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+ 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+ 'GCC_OBJC_CALL_CXX_CDTORS': 'YES', # -fobjc-call-cxx-cdtors
+ 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
+ 'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics
+ 'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES', # -Werror
+ 'GCC_VERSION': '4.2',
+ 'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof
+ 'USE_HEADERMAP': 'NO',
+ 'WARNING_CFLAGS': [
+ '-Wall',
+ '-Wendif-labels',
+ '-Wextra',
+ # Don't warn about unused function parameters.
+ '-Wno-unused-parameter',
+ # Don't warn about the "struct foo f = {0};" initialization
+ # pattern.
+ '-Wno-missing-field-initializers',
+ ],
+ 'conditions': [
+ ['chromium_mac_pch', {'GCC_PRECOMPILE_PREFIX_HEADER': 'YES'},
+ {'GCC_PRECOMPILE_PREFIX_HEADER': 'NO'}
+ ],
+ ],
+ },
+ 'target_conditions': [
+ ['_type!="static_library"', {
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+ }],
+ ['_mac_bundle', {
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+ }],
+ ], # target_conditions
+ }, # target_defaults
+ }], # OS=="mac" or OS=="ios"
+ ['OS=="mac"', {
+ 'target_defaults': {
+ 'variables': {
+ # These should end with %, but there seems to be a bug with % in
+ # variables that are intended to be set to different values in
+ # different targets, like these.
+ 'mac_pie': 1, # Most executables can be position-independent.
+ 'mac_real_dsym': 0, # Fake .dSYMs are fine in most cases.
+ # Strip debugging symbols from the target.
+ 'mac_strip': '<(mac_strip_release)',
+ },
+ 'xcode_settings': {
+ 'GCC_DYNAMIC_NO_PIC': 'NO', # No -mdynamic-no-pic
+ # (Equivalent to -fPIC)
+ # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+ 'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+ # Keep pch files below xcodebuild/.
+ 'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders',
+ 'OTHER_CFLAGS': [
+ '-fno-strict-aliasing', # See http://crbug.com/32204
+ ],
+ 'conditions': [
+ ['clang==1', {
+ 'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
+ 'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
+
+ # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
+ # when building with clang. This warning is triggered when the
+ # override keyword is used via the OVERRIDE macro from
+ # base/compiler_specific.h.
+ 'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
+
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'WARNING_CFLAGS': [
+ '-Wheader-hygiene',
+ # Don't die on dtoa code that uses a char as an array index.
+ # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+ '-Wno-char-subscripts',
+ # Clang spots more unused functions.
+ '-Wno-unused-function',
+ # See comments on this flag higher up in this file.
+ '-Wno-unnamed-type-template-args',
+ # This (rightfully) complains about 'override', which we use
+ # heavily.
+ '-Wno-c++11-extensions',
+
+ # Warns on switches on enums that cover all enum values but
+ # also contain a default: branch. Chrome is full of that.
+ '-Wno-covered-switch-default',
+
+ # TODO(thakis): Remove this.
+ '-Wno-implicit-conversion-floating-point-to-bool',
+ ],
+ }],
+ ['clang==1 and clang_use_chrome_plugins==1', {
+ 'OTHER_CFLAGS': [
+ '<@(clang_chrome_plugins_flags)',
+ ],
+ }],
+ ['clang==1 and clang_load!=""', {
+ 'OTHER_CFLAGS': [
+ '-Xclang', '-load', '-Xclang', '<(clang_load)',
+ ],
+ }],
+ ['clang==1 and clang_add_plugin!=""', {
+ 'OTHER_CFLAGS': [
+ '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+ ],
+ }],
+ ['clang==1 and "<(GENERATOR)"=="ninja"', {
+ 'OTHER_CFLAGS': [
+ # See http://crbug.com/110262
+ '-fcolor-diagnostics',
+ ],
+ }],
+ ],
+ },
+ 'conditions': [
+ ['clang==1', {
+ 'variables': {
+ 'clang_dir': '../third_party/llvm-build/Release+Asserts/bin',
+ },
+ }],
+ ['asan==1', {
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-faddress-sanitizer',
+ ],
+ },
+ 'defines': [
+ 'ADDRESS_SANITIZER',
+ ],
+ }],
+ ],
+ 'target_conditions': [
+ ['_type!="static_library"', {
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+ 'conditions': [
+ ['asan==1', {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-faddress-sanitizer',
+ ],
+ },
+ }],
+ ],
+ }],
+ ['_mac_bundle', {
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+ }],
+ ['_type=="executable"', {
+ 'postbuilds': [
+ {
+ # Arranges for data (heap) pages to be protected against
+ # code execution when running on Mac OS X 10.7 ("Lion"), and
+ # ensures that the position-independent executable (PIE) bit
+ # is set for ASLR when running on Mac OS X 10.5 ("Leopard").
+ 'variables': {
+ # Define change_mach_o_flags in a variable ending in _path
+ # so that GYP understands it's a path and performs proper
+ # relativization during dict merging.
+ 'change_mach_o_flags':
+ 'mac/change_mach_o_flags_from_xcode.sh',
+ 'change_mach_o_flags_options%': [
+ ],
+ 'target_conditions': [
+ ['mac_pie==0 or release_valgrind_build==1', {
+ # Don't enable PIE if it's unwanted. It's unwanted if
+ # the target specifies mac_pie=0 or if building for
+ # Valgrind, because Valgrind doesn't understand slide.
+ # See the similar mac_pie/release_valgrind_build check
+ # below.
+ 'change_mach_o_flags_options': [
+ '--no-pie',
+ ],
+ }],
+ ],
+ },
+ 'postbuild_name': 'Change Mach-O Flags',
+ 'action': [
+ '$(srcdir)$(os_sep)build$(os_sep)<(change_mach_o_flags)',
+ '>@(change_mach_o_flags_options)',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['asan==1', {
+ 'variables': {
+ 'asan_saves_file': 'asan.saves',
+ },
+ 'xcode_settings': {
+ 'CHROMIUM_STRIP_SAVE_FILE': '<(asan_saves_file)',
+ },
+ }],
+ ],
+ 'target_conditions': [
+ ['mac_pie==1 and release_valgrind_build==0', {
+ # Turn on position-independence (ASLR) for executables. When
+ # PIE is on for the Chrome executables, the framework will
+ # also be subject to ASLR.
+ # Don't do this when building for Valgrind, because Valgrind
+ # doesn't understand slide. TODO: Make Valgrind on Mac OS X
+ # understand slide, and get rid of the Valgrind check.
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-Wl,-pie', # Position-independent executable (MH_PIE)
+ ],
+ },
+ }],
+ ],
+ }],
+ ['(_type=="executable" or _type=="shared_library" or \
+ _type=="loadable_module") and mac_strip!=0', {
+ 'target_conditions': [
+ ['mac_real_dsym == 1', {
+ # To get a real .dSYM bundle produced by dsymutil, set the
+ # debug information format to dwarf-with-dsym. Since
+ # strip_from_xcode will not be used, set Xcode to do the
+ # stripping as well.
+ 'configurations': {
+ 'Release_Base': {
+ 'xcode_settings': {
+ 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'target_conditions': [
+ ['_type=="shared_library" or _type=="loadable_module"', {
+ # The Xcode default is to strip debugging symbols
+ # only (-S). Local symbols should be stripped as
+ # well, which will be handled by -x. Xcode will
+ # continue to insert -S when stripping even when
+ # additional flags are added with STRIPFLAGS.
+ 'STRIPFLAGS': '-x',
+ }], # _type=="shared_library" or _type=="loadable_module"'
+ ], # target_conditions
+ }, # xcode_settings
+ }, # configuration "Release"
+ }, # configurations
+ }, { # mac_real_dsym != 1
+ # To get a fast fake .dSYM bundle, use a post-build step to
+ # produce the .dSYM and strip the executable. strip_from_xcode
+ # only operates in the Release configuration.
+ 'postbuilds': [
+ {
+ 'variables': {
+ # Define strip_from_xcode in a variable ending in _path
+ # so that gyp understands it's a path and performs proper
+ # relativization during dict merging.
+ 'strip_from_xcode': 'mac/strip_from_xcode',
+ },
+ 'postbuild_name': 'Strip If Needed',
+ 'action': ['$(srcdir)$(os_sep)build$(os_sep)<(strip_from_xcode)'],
+ },
+ ], # postbuilds
+ }], # mac_real_dsym
+ ], # target_conditions
+ }], # (_type=="executable" or _type=="shared_library" or
+ # _type=="loadable_module") and mac_strip!=0
+ ], # target_conditions
+ }, # target_defaults
+ }], # OS=="mac"
+ ['OS=="ios"', {
+ 'target_defaults': {
+ 'xcode_settings' : {
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+
+ # This next block is mostly common with the 'mac' section above,
+ # but keying off (or setting) 'clang' isn't valid for iOS as it
+ # also seems to mean using the custom build of clang.
+
+ # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
+        # when building with clang. This warning is triggered when the
+ # override keyword is used via the OVERRIDE macro from
+ # base/compiler_specific.h.
+ 'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
+ 'WARNING_CFLAGS': [
+ '-Wheader-hygiene',
+ # Don't die on dtoa code that uses a char as an array index.
+ # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+ '-Wno-char-subscripts',
+ # Clang spots more unused functions.
+ '-Wno-unused-function',
+ # See comments on this flag higher up in this file.
+ '-Wno-unnamed-type-template-args',
+          # This (rightfully) complains about 'override', which we use
+ # heavily.
+ '-Wno-c++11-extensions',
+ ],
+ },
+ 'target_conditions': [
+ ['_type=="executable"', {
+ 'configurations': {
+ 'Release_Base': {
+ 'xcode_settings': {
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ },
+ },
+ },
+ 'xcode_settings': {
+ 'conditions': [
+ ['chromium_ios_signing', {
+ # iOS SDK wants everything for device signed.
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ }, {
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+ }],
+ ],
+ },
+ }],
+ ], # target_conditions
+ }, # target_defaults
+ }], # OS=="ios"
+ ['OS=="win"', {
+ 'target_defaults': {
+ 'defines': [
+ 'WIN32',
+ '_WINDOWS',
+ 'NOMINMAX',
+ 'PSAPI_VERSION=1',
+ '_CRT_RAND_S',
+ 'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS',
+ 'WIN32_LEAN_AND_MEAN',
+ '_ATL_NO_OPENGL',
+ ],
+ 'conditions': [
+ ['build_with_mozilla==0', {
+ 'defines': [
+ '_WIN32_WINNT=0x0602',
+ 'WINVER=0x0602',
+ ],
+ }],
+ ['buildtype=="Official"', {
+ # In official builds, targets can self-select an optimization
+ # level by defining a variable named 'optimize', and setting it
+ # to one of
+ # - "size", optimizes for minimal code size - the default.
+ # - "speed", optimizes for speed over code size.
+ # - "max", whole program optimization and link-time code
+ # generation. This is very expensive and should be used
+ # sparingly.
+ 'variables': {
+ 'optimize%': 'size',
+ },
+ 'target_conditions': [
+ ['optimize=="size"', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ # 1, optimizeMinSpace, Minimize Size (/O1)
+ 'Optimization': '1',
+ # 2, favorSize - Favor small code (/Os)
+ 'FavorSizeOrSpeed': '2',
+ },
+ },
+ },
+ ],
+ ['optimize=="speed"', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+ 'Optimization': '2',
+ # 1, favorSpeed - Favor fast code (/Ot)
+ 'FavorSizeOrSpeed': '1',
+ },
+ },
+ },
+ ],
+ ['optimize=="max"', {
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+ 'Optimization': '2',
+ # 1, favorSpeed - Favor fast code (/Ot)
+ 'FavorSizeOrSpeed': '1',
+ # This implies link time code generation.
+ 'WholeProgramOptimization': 'true',
+ },
+ },
+ },
+ ],
+ ],
+ },
+ ],
+ ['component=="static_library"', {
+ 'defines': [
+ '_HAS_EXCEPTIONS=0',
+ ],
+ }],
+ ['MSVS_VERSION=="2008"', {
+ 'defines': [
+ '_HAS_TR1=0',
+ ],
+ }],
+ ['secure_atl', {
+ 'defines': [
+ '_SECURE_ATL',
+ ],
+ }],
+ ],
+ 'msvs_system_include_dirs': [
+ '<(windows_sdk_path)/Include/shared',
+ '<(windows_sdk_path)/Include/um',
+ '<(windows_sdk_path)/Include/winrt',
+ '<(directx_sdk_path)/Include',
+ '$(VSInstallDir)/VC/atlmfc/include',
+ ],
+ 'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'],
+ 'msvs_disabled_warnings': [4351, 4396, 4503, 4819,
+ # TODO(maruel): These warnings are level 4. They will be slowly
+ # removed as code is fixed.
+ 4100, 4121, 4125, 4127, 4130, 4131, 4189, 4201, 4238, 4244, 4245,
+ 4310, 4355, 4428, 4481, 4505, 4510, 4512, 4530, 4610, 4611, 4701,
+ 4702, 4706,
+ ],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': ['/MP'],
+ 'MinimalRebuild': 'false',
+ 'BufferSecurityCheck': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'RuntimeTypeInfo': 'false',
+ 'WarningLevel': '4',
+ 'WarnAsError': 'true',
+ 'DebugInformationFormat': '3',
+ 'conditions': [
+ ['component=="shared_library"', {
+ 'ExceptionHandling': '1', # /EHsc
+ }, {
+ 'ExceptionHandling': '0',
+ }],
+ ],
+ },
+ 'VCLibrarianTool': {
+ 'AdditionalOptions': ['/ignore:4221'],
+ 'AdditionalLibraryDirectories': [
+ '<(directx_sdk_path)/Lib/x86',
+ '<(windows_sdk_path)/Lib/win8/um/x86',
+ ],
+ },
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': [
+ 'wininet.lib',
+ 'dnsapi.lib',
+ 'version.lib',
+ 'msimg32.lib',
+ 'ws2_32.lib',
+ 'usp10.lib',
+ 'psapi.lib',
+ 'dbghelp.lib',
+ 'winmm.lib',
+ 'shlwapi.lib',
+ ],
+
+ 'conditions': [
+ ['msvs_express', {
+ # Explicitly required when using the ATL with express
+ 'AdditionalDependencies': [
+ 'atlthunk.lib',
+ ],
+
+                # ATL 8.0 included in WDK 7.1 makes the linker generate
+ # almost eight hundred LNK4254 and LNK4078 warnings:
+ # - warning LNK4254: section 'ATL' (50000040) merged into
+ # '.rdata' (40000040) with different attributes
+ # - warning LNK4078: multiple 'ATL' sections found with
+ # different attributes
+ 'AdditionalOptions': ['/ignore:4254', '/ignore:4078'],
+ }],
+ ['MSVS_VERSION=="2005e"', {
+ # Non-express versions link automatically to these
+ 'AdditionalDependencies': [
+ 'advapi32.lib',
+ 'comdlg32.lib',
+ 'ole32.lib',
+ 'shell32.lib',
+ 'user32.lib',
+ 'winspool.lib',
+ ],
+ }],
+ ],
+ 'AdditionalLibraryDirectories': [
+ '<(directx_sdk_path)/Lib/x86',
+ '<(windows_sdk_path)/Lib/win8/um/x86',
+ ],
+ 'GenerateDebugInformation': 'true',
+ 'MapFileName': '$(OutDir)\\$(TargetName).map',
+ 'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+ 'FixedBaseAddress': '1',
+ # SubSystem values:
+ # 0 == not set
+ # 1 == /SUBSYSTEM:CONSOLE
+ # 2 == /SUBSYSTEM:WINDOWS
+ # Most of the executables we'll ever create are tests
+ # and utilities with console output.
+ 'SubSystem': '1',
+ },
+ 'VCMIDLTool': {
+ 'GenerateStublessProxies': 'true',
+ 'TypeLibraryName': '$(InputName).tlb',
+ 'OutputDirectory': '$(IntDir)',
+ 'HeaderFileName': '$(InputName).h',
+ 'DLLDataFileName': '$(InputName).dlldata.c',
+ 'InterfaceIdentifierFileName': '$(InputName)_i.c',
+ 'ProxyFileName': '$(InputName)_p.c',
+ },
+ 'VCResourceCompilerTool': {
+ 'Culture' : '1033',
+ 'AdditionalIncludeDirectories': [
+ '<(DEPTH)',
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ },
+ },
+ },
+ }],
+ ['disable_nacl==1', {
+ 'target_defaults': {
+ 'defines': [
+ 'DISABLE_NACL',
+ ],
+ },
+ }],
+ ['OS=="win" and msvs_use_common_linker_extras', {
+ 'target_defaults': {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'DelayLoadDLLs': [
+ 'dbghelp.dll',
+ 'dwmapi.dll',
+ 'shell32.dll',
+ 'uxtheme.dll',
+ ],
+ },
+ },
+ 'configurations': {
+ 'x86_Base': {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalOptions': [
+ '/safeseh',
+ '/dynamicbase',
+ '/ignore:4199',
+ '/ignore:4221',
+ '/nxcompat',
+ ],
+ },
+ },
+ },
+ 'x64_Base': {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'AdditionalOptions': [
+ # safeseh is not compatible with x64
+ '/dynamicbase',
+ '/ignore:4199',
+ '/ignore:4221',
+ '/nxcompat',
+ ],
+ },
+ },
+ },
+ },
+ },
+ }],
+ ['enable_new_npdevice_api==1', {
+ 'target_defaults': {
+ 'defines': [
+ 'ENABLE_NEW_NPDEVICE_API',
+ ],
+ },
+ }],
+ ['clang==1', {
+ 'conditions': [
+ ['OS=="android"', {
+ # Android could use the goma with clang.
+ 'make_global_settings': [
+ ['CC', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} ${CHROME_SRC}/<(make_clang_dir)/bin/clang)'],
+ ['CXX', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} ${CHROME_SRC}/<(make_clang_dir)/bin/clang++)'],
+ ['LINK', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} ${CHROME_SRC}/<(make_clang_dir)/bin/clang++)'],
+ ['CC.host', '$(CC)'],
+ ['CXX.host', '$(CXX)'],
+ ['LINK.host', '$(LINK)'],
+ ],
+ }, {
+ 'make_global_settings': [
+ ['CC', '<(make_clang_dir)/bin/clang'],
+ ['CXX', '<(make_clang_dir)/bin/clang++'],
+ ['LINK', '$(CXX)'],
+ ['CC.host', '$(CC)'],
+ ['CXX.host', '$(CXX)'],
+ ['LINK.host', '$(LINK)'],
+ ],
+ }],
+ ],
+ }],
+ ['OS=="android" and clang==0', {
+ # Hardcode the compiler names in the Makefile so that
+ # it won't depend on the environment at make time.
+ 'make_global_settings': [
+ ['CC', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-gcc)'],
+ ['CXX', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-g++)'],
+ ['LINK', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <(android_toolchain)/*-gcc)'],
+ ['CC.host', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <!(which gcc))'],
+ ['CXX.host', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <!(which g++))'],
+ ['LINK.host', '<!(/bin/echo -n ${ANDROID_GOMA_WRAPPER} <!(which g++))'],
+ ],
+ }],
+ ],
+ 'xcode_settings': {
+ # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+ # This block adds *project-wide* configuration settings to each project
+ # file. It's almost always wrong to put things here. Specify your
+ # custom xcode_settings in target_defaults to add them to targets instead.
+
+ 'conditions': [
+ # In an Xcode Project Info window, the "Base SDK for All Configurations"
+ # setting sets the SDK on a project-wide basis. In order to get the
+ # configured SDK to show properly in the Xcode UI, SDKROOT must be set
+ # here at the project level.
+ ['OS=="mac"', {
+ 'conditions': [
+ ['mac_sdk_path==""', {
+ 'SDKROOT': 'macosx<(mac_sdk)', # -isysroot
+ }, {
+ 'SDKROOT': '<(mac_sdk_path)', # -isysroot
+ }],
+ ],
+ }],
+ ['OS=="ios"', {
+ 'conditions': [
+ ['ios_sdk_path==""', {
+ 'SDKROOT': 'iphoneos<(ios_sdk)', # -isysroot
+ }, {
+ 'SDKROOT': '<(ios_sdk_path)', # -isysroot
+ }],
+ ],
+ }],
+ ['OS=="ios"', {
+ # Just build armv7 since iOS 4.3+ only supports armv7.
+ 'ARCHS': '$(ARCHS_UNIVERSAL_IPHONE_OS)',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '<(ios_deployment_target)',
+ # Target both iPhone and iPad.
+ 'TARGETED_DEVICE_FAMILY': '1,2',
+ }],
+ ],
+
+ # The Xcode generator will look for an xcode_settings section at the root
+ # of each dict and use it to apply settings on a file-wide basis. Most
+ # settings should not be here, they should be in target-specific
+ # xcode_settings sections, or better yet, should use non-Xcode-specific
+ # settings in target dicts. SYMROOT is a special case, because many other
+ # Xcode variables depend on it, including variables such as
+ # PROJECT_DERIVED_FILE_DIR. When a source group corresponding to something
+ # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the
+  # files to appear (when present) in the UI as actual files and not
+  # red "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR,
+ # and therefore SYMROOT, needs to be set at the project level.
+ 'SYMROOT': '<(DEPTH)/xcodebuild',
+ },
+}
diff --git a/media/webrtc/trunk/build/common_untrusted.gypi b/media/webrtc/trunk/build/common_untrusted.gypi
new file mode 100644
index 000000000..086264e03
--- /dev/null
+++ b/media/webrtc/trunk/build/common_untrusted.gypi
@@ -0,0 +1,29 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This GYP file should be included for every target in Chromium that is built
+# using the NaCl toolchain.
+{
+ 'includes': [
+ '../native_client/build/untrusted.gypi',
+ ],
+ 'target_defaults': {
+ 'conditions': [
+ ['target_arch=="arm"', {
+ 'variables': {
+ 'clang': 1,
+ },
+ 'defines': [
+ # Needed by build/build_config.h processor architecture detection.
+ '__ARMEL__',
+ # Needed by base/third_party/nspr/prtime.cc.
+ '__arm__',
+ # Disable ValGrind. The assembly code it generates causes the build
+ # to fail.
+ 'NVALGRIND',
+ ],
+ }],
+ ],
+ },
+} \ No newline at end of file
diff --git a/media/webrtc/trunk/build/compiler_version.py b/media/webrtc/trunk/build/compiler_version.py
new file mode 100755
index 000000000..eae7b176d
--- /dev/null
+++ b/media/webrtc/trunk/build/compiler_version.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+def GetVersion(compiler):
+ try:
+ # Note that compiler could be something tricky like "distcc g++".
+ compiler = compiler + " -dumpversion"
+ pipe = subprocess.Popen(compiler, shell=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ gcc_output, gcc_error = pipe.communicate()
+ if pipe.returncode:
+ raise subprocess.CalledProcessError(pipe.returncode, compiler)
+
+ result = re.match(r"(\d+)\.(\d+)", gcc_output)
+ return result.group(1) + result.group(2)
+ except Exception, e:
+ if gcc_error:
+ sys.stderr.write(gcc_error)
+ print >> sys.stderr, "compiler_version.py failed to execute:", compiler
+ print >> sys.stderr, e
+ return ""
+
+def GetVersionFromEnvironment(compiler_env):
+ """ Returns the version of compiler
+
+ If the compiler was set by the given environment variable and exists,
+ return its version, otherwise None is returned.
+ """
+ cxx = os.getenv(compiler_env, None)
+ if cxx:
+ cxx_version = GetVersion(cxx)
+ if cxx_version != "":
+ return cxx_version
+ return None
+
+def main():
+  # Check if CXX_target or CXX environment variable exists and if it does use
+ # that compiler.
+ # TODO: Fix ninja (see http://crbug.com/140900) instead and remove this code
+ # In ninja's cross compile mode, the CXX_target is target compiler, while
+ # the CXX is host. The CXX_target needs be checked first, though the target
+ # and host compiler have different version, there seems no issue to use the
+ # target compiler's version number as gcc_version in Android.
+ cxx_version = GetVersionFromEnvironment("CXX_target")
+ if cxx_version:
+ print cxx_version
+ return 0
+
+ cxx_version = GetVersionFromEnvironment("CXX")
+ if cxx_version:
+ print cxx_version
+ return 0
+
+ # Otherwise we check the g++ version.
+ gccversion = GetVersion("g++")
+ if gccversion != "":
+ print gccversion
+ return 0
+
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/copy_test_data_ios.gypi b/media/webrtc/trunk/build/copy_test_data_ios.gypi
new file mode 100644
index 000000000..150df6e1d
--- /dev/null
+++ b/media/webrtc/trunk/build/copy_test_data_ios.gypi
@@ -0,0 +1,48 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to copy test data files into
+# an iOS app bundle. To use this the following variables need to be defined:
+# test_data_files: list: paths to test data files or directories
+# test_data_prefix: string: a directory prefix that will be prepended to each
+# output path. Generally, this should be the base
+# directory of the gypi file containing the unittest
+# target (e.g. "base" or "chrome").
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'my_unittests',
+# 'conditions': [
+# ['OS == "ios"', {
+# 'actions': [
+# {
+# 'action_name': 'copy_test_data',
+# 'variables': {
+# 'test_data_files': [
+# 'path/to/datafile.txt',
+# 'path/to/data/directory/',
+# ]
+# 'test_data_prefix' : 'prefix',
+# },
+# 'includes': ['path/to/this/gypi/file'],
+# },
+# ],
+# }],
+# }
+#
+
+{
+ 'inputs': [
+ '<!@pymod_do_main(copy_test_data_ios --inputs <(test_data_files))',
+ ],
+ 'outputs': [
+ '<!@pymod_do_main(copy_test_data_ios -o <(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix) --outputs <(test_data_files))',
+ ],
+ 'action': [
+ 'python',
+ '<(DEPTH)/build/copy_test_data_ios.py',
+ '-o', '<(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix)',
+ '<(_inputs)',
+ ],
+}
diff --git a/media/webrtc/trunk/build/copy_test_data_ios.py b/media/webrtc/trunk/build/copy_test_data_ios.py
new file mode 100755
index 000000000..513073583
--- /dev/null
+++ b/media/webrtc/trunk/build/copy_test_data_ios.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
+class WrongNumberOfArgumentsException(Exception):
+ pass
+
+def ListFilesForPath(path):
+ """Returns a list of all the files under a given path."""
+ output = []
+ # Ignore dotfiles and dot directories.
+ # TODO(rohitrao): This will fail to exclude cases where the initial argument
+ # is a relative path that starts with a dot.
+ if os.path.basename(path).startswith('.'):
+ return output
+
+ # Files get returned without modification.
+ if not os.path.isdir(path):
+ output.append(path)
+ return output
+
+ # Directories get recursively expanded.
+ contents = os.listdir(path)
+ for item in contents:
+ full_path = os.path.join(path, item)
+ output.extend(ListFilesForPath(full_path))
+ return output
+
+def CalcInputs(inputs):
+ """Computes the full list of input files for a set of command-line arguments.
+ """
+  # |inputs| is a list of strings, each of which may contain multiple paths
+ # separated by spaces.
+ output = []
+ for input in inputs:
+ tokens = input.split()
+ for token in tokens:
+ output.extend(ListFilesForPath(token))
+ return output
+
+def CopyFiles(relative_filenames, output_basedir):
+ """Copies files to the given output directory."""
+ for file in relative_filenames:
+ relative_dirname = os.path.dirname(file)
+ output_dir = os.path.join(output_basedir, relative_dirname)
+ output_filename = os.path.join(output_basedir, file)
+
+ # In cases where a directory has turned into a file or vice versa, delete it
+ # before copying it below.
+ if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+ os.remove(output_dir)
+ if os.path.exists(output_filename) and os.path.isdir(output_filename):
+ shutil.rmtree(output_filename)
+
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ shutil.copy(file, output_filename)
+
+def DoMain(argv):
+ parser = optparse.OptionParser()
+ usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+ parser.set_usage(usage)
+ parser.add_option('-o', dest='output_dir')
+ parser.add_option('--inputs', action='store_true', dest='list_inputs')
+ parser.add_option('--outputs', action='store_true', dest='list_outputs')
+ options, arglist = parser.parse_args(argv)
+
+ if len(arglist) == 0:
+ raise WrongNumberOfArgumentsException('<input_files> required.')
+
+ files_to_copy = CalcInputs(arglist)
+ if options.list_inputs:
+ return '\n'.join(files_to_copy)
+
+ if not options.output_dir:
+ raise WrongNumberOfArgumentsException('-o required.')
+
+ if options.list_outputs:
+ outputs = [os.path.join(options.output_dir, x) for x in files_to_copy]
+ return '\n'.join(outputs)
+
+ CopyFiles(files_to_copy, options.output_dir)
+ return
+
+def main(argv):
+ try:
+ result = DoMain(argv[1:])
+ except WrongNumberOfArgumentsException, e:
+ print >>sys.stderr, e
+ return 1
+ if result:
+ print result
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/cp.py b/media/webrtc/trunk/build/cp.py
new file mode 100755
index 000000000..dd98e1db1
--- /dev/null
+++ b/media/webrtc/trunk/build/cp.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import shutil
+import sys
+
+
+def Main(src, dst):
+ # Use copy instead of copyfile to ensure the executable bit is copied.
+ return shutil.copy(src, dst)
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/media/webrtc/trunk/build/dir_exists.py b/media/webrtc/trunk/build/dir_exists.py
new file mode 100755
index 000000000..0a89bc87b
--- /dev/null
+++ b/media/webrtc/trunk/build/dir_exists.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+ sys.stdout.write(str(os.path.isdir(sys.argv[1])))
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/download_nacl_toolchains.py b/media/webrtc/trunk/build/download_nacl_toolchains.py
new file mode 100755
index 000000000..c2007d0f1
--- /dev/null
+++ b/media/webrtc/trunk/build/download_nacl_toolchains.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+import os
+import sys
+
+
+def Main(args):
+ # Exit early if disable_nacl=1.
+ if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
+ return 0
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ src_dir = os.path.dirname(script_dir)
+ nacl_dir = os.path.join(src_dir, 'native_client')
+ nacl_build_dir = os.path.join(nacl_dir, 'build')
+ download_script = os.path.join(nacl_build_dir, 'download_toolchains.py')
+ if not os.path.exists(download_script):
+ print "Can't find '%s'" % download_script
+ print 'Presumably you are intentionally building without NativeClient.'
+ print 'Skipping NativeClient toolchain download.'
+ sys.exit(0)
+ sys.path.insert(0, nacl_build_dir)
+ import download_toolchains
+
+ # TODO (robertm): Finish getting PNaCl ready for prime time.
+ # BUG:
+ # We remove this --optional-pnacl argument, and instead replace it with
+ # --no-pnacl for most cases. However, if the bot name is the pnacl_sdk
+ # bot then we will go ahead and download it. This prevents increasing the
+ # gclient sync time for developers, or standard Chrome bots.
+ if '--optional-pnacl' in args:
+ args.remove('--optional-pnacl')
+ # By default we don't use PNaCl toolchain yet, unless on ARM, where
+ # there is no other toolchain to build untrusted code at the moment.
+ # So analyze if we're building for ARM, or on SDK buildbot.
+ # TODO(olonho): we need to invent more reliable way to get build
+ # configuration info, to know if we're building for ARM.
+ use_pnacl = False
+ if 'target_arch=arm' in os.environ.get('GYP_DEFINES', ''):
+ use_pnacl = True
+ buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+ if buildbot_name.find('pnacl') >= 0 and buildbot_name.find('sdk') >= 0:
+ use_pnacl = True
+ if use_pnacl:
+ print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
+ else:
+ args.append('--no-pnacl')
+
+ # Append the name of the file to use as a version and hash source.
+ # NOTE: While not recommended, it is possible to redirect this file to
+ # a chrome location to avoid branching NaCl if just a toolchain needs
+ # to be bumped.
+ args.append(os.path.join(nacl_dir,'TOOL_REVISIONS'))
+
+ download_toolchains.main(args)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/media/webrtc/trunk/build/escape_unicode.py b/media/webrtc/trunk/build/escape_unicode.py
new file mode 100755
index 000000000..859ba5d03
--- /dev/null
+++ b/media/webrtc/trunk/build/escape_unicode.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert any unicode characters found in the input file to C literals."""
+
+import codecs
+import optparse
+import os
+import sys
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ usage = 'Usage: %prog -o <output_dir> <input_file>'
+ parser.set_usage(usage)
+ parser.add_option('-o', dest='output_dir')
+
+ options, arglist = parser.parse_args(argv)
+
+ if not options.output_dir:
+ print "output_dir required"
+ return 1
+
+ if len(arglist) != 2:
+ print "input_file required"
+ return 1
+
+ in_filename = arglist[1]
+
+ if not in_filename.endswith('.utf8'):
+ print "input_file should end in .utf8"
+ return 1
+
+ out_filename = os.path.join(options.output_dir, os.path.basename(
+ os.path.splitext(in_filename)[0]))
+
+ WriteEscapedFile(in_filename, out_filename)
+ return 0
+
+
+def WriteEscapedFile(in_filename, out_filename):
+ input_data = codecs.open(in_filename, 'r', 'utf8').read()
+ with codecs.open(out_filename, 'w', 'ascii') as out_file:
+ for i, char in enumerate(input_data):
+ if ord(char) > 127:
+ out_file.write(repr(char.encode('utf8'))[1:-1])
+ if input_data[i + 1:i + 2] in '0123456789abcdefABCDEF':
+ out_file.write('""')
+ else:
+ out_file.write(char.encode('ascii'))
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/extract_from_cab.py b/media/webrtc/trunk/build/extract_from_cab.py
new file mode 100755
index 000000000..1c928af36
--- /dev/null
+++ b/media/webrtc/trunk/build/extract_from_cab.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def run_quiet(*args):
+ """Run 'expand' supressing noisy output. Returns returncode from process."""
+ popen = subprocess.Popen(args, stdout=subprocess.PIPE)
+ out, _ = popen.communicate()
+ if popen.returncode:
+ # expand emits errors to stdout, so if we fail, then print that out.
+ print out
+ return popen.returncode
+
+def main():
+ if len(sys.argv) != 4:
+ print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
+ return 1
+
+ [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+ # Expand.exe does its work in a fixed-named temporary directory created within
+ # the given output directory. This is a problem for concurrent extractions, so
+ # create a unique temp dir within the desired output directory to work around
+ # this limitation.
+ temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+ try:
+ # Invoke the Windows expand utility to extract the file.
+ level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
+ if level == 0:
+ # Move the output file into place, preserving expand.exe's behavior of
+ # paving over any preexisting file.
+ output_file = os.path.join(output_dir, archived_file)
+ try:
+ os.remove(output_file)
+ except OSError:
+ pass
+ os.rename(os.path.join(temp_dir, archived_file), output_file)
+ finally:
+ shutil.rmtree(temp_dir, True)
+
+ if level != 0:
+ return level
+
+ # The expand utility preserves the modification date and time of the archived
+ # file. Touch the extracted file. This helps build systems that compare the
+ # modification times of input and output files to determine whether to do an
+ # action.
+ os.utime(os.path.join(output_dir, archived_file), None)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/filename_rules.gypi b/media/webrtc/trunk/build/filename_rules.gypi
new file mode 100644
index 000000000..7b16a1559
--- /dev/null
+++ b/media/webrtc/trunk/build/filename_rules.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This gypi file defines the patterns used for determining whether a
+# file is excluded from the build on a given platform. It is
+# included by common.gypi for chromium_code.
+
{
  'target_conditions': [
    # Exclude Windows-only sources when not targeting Windows. The
    # nacl_untrusted_build clauses throughout treat the NaCl untrusted
    # toolchain as its own platform, distinct from the host OS.
    ['OS!="win" or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_win(_unittest)?\\.(h|cc)$'],
                    ['exclude', '(^|/)win/'],
                    ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
    }],
    ['OS!="mac" or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
                    ['exclude', '(^|/)(cocoa|mac)/'] ],
    }],
    ['OS!="ios" or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
                    ['exclude', '(^|/)ios/'] ],
    }],
    # Objective-C(++) sources (.m/.mm) only build on Apple platforms.
    ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '\\.mm?$' ] ],
    }],
    # Do not exclude the linux files on *BSD since most of them can be
    # shared at this point.
    # In case a file is not needed, it is going to be excluded later on.
    # TODO(evan): the above is not correct; we shouldn't build _linux
    # files on non-linux.
    ['OS!="linux" and OS!="solaris" and <(os_bsd)!=1 or >(nacl_untrusted_build)==1', {
      'sources/': [
        ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
        ['exclude', '(^|/)linux/'],
      ],
    }],
    ['OS!="android"', {
      'sources/': [
        ['exclude', '_android(_unittest)?\\.cc$'],
        ['exclude', '(^|/)android/'],
      ],
    }],
    # The inverse of the first rule: drop POSIX sources on Windows.
    ['OS=="win" and >(nacl_untrusted_build)==0', {
      'sources/': [
        ['exclude', '_posix(_unittest)?\\.(h|cc)$'],
        ['exclude', '(^|/)posix/'],
      ],
    }],
    ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'] ]
    }],
    ['>(nacl_untrusted_build)==0', {
      'sources/': [
        ['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
      ],
    }],
    ['OS!="linux" and OS!="solaris" and <(os_bsd)!=1 or >(nacl_untrusted_build)==1', {
      'sources/': [
        ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
      ],
    }],
    ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
      'sources/': [
        ['exclude', '_(x|x11)(_unittest)?\\.(h|cc)$'],
        ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
      ],
    }],
    # build_with_mozilla keeps gtk sources in the Mozilla build even when the
    # toolkit check would otherwise exclude them.
    ['(<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1) and (build_with_mozilla==0)', {
      'sources/': [
        ['exclude', '_gtk(_browsertest|_unittest)?\\.(h|cc)$'],
        ['exclude', '(^|/)gtk/'],
        ['exclude', '(^|/)gtk_[^/]*\\.(h|cc)$'],
      ],
    }],
    ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_views\\.(h|cc)$'] ]
    }],
    ['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_aura(_unittest)?\\.(h|cc)$'],
                    ['exclude', '(^|/)aura/'],
      ]
    }],
    # _aurax11/_aurawin files need both aura and the respective windowing
    # system, so they are gated on the conjunction.
    ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_aurax11\\.(h|cc)$'] ]
    }],
    ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
    }],
    ['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
      'sources/': [ ['exclude', '_ash(_unittest)?\\.(h|cc)$'],
                    ['exclude', '(^|/)ash/'],
      ]
    }],
  ]
}
diff --git a/media/webrtc/trunk/build/gdb-add-index b/media/webrtc/trunk/build/gdb-add-index
new file mode 100755
index 000000000..497553221
--- /dev/null
+++ b/media/webrtc/trunk/build/gdb-add-index
@@ -0,0 +1,47 @@
#!/bin/bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Saves the gdb index for a given binary and its shared library dependencies.

set -e

if [[ ! $# == 1 ]]; then
  echo "Usage: $0 path-to-binary"
  exit 1
fi

FILENAME="$1"
if [[ ! -f "$FILENAME" ]]; then
  echo "Path $FILENAME does not exist."
  exit 1
fi

# We're good to go! Create temp directory for index files.
DIRECTORY=$(mktemp -d)
echo "Made temp directory $DIRECTORY."

# Always remove directory on exit.
trap "{ echo -n Removing temp directory $DIRECTORY...;
  rm -rf $DIRECTORY; echo done; }" EXIT

# Grab all the chromium shared library files.
# The grep keeps only libraries resolved from the binary's own directory;
# system libraries resolved elsewhere by the loader are left alone.
so_files=$(ldd "$FILENAME" 2>/dev/null \
  | grep $(dirname "$FILENAME") \
  | sed "s/.*[ \t]\(.*\) (.*/\1/")

# Add index to binary and the shared library dependencies.
for file in "$FILENAME" $so_files; do
  basename=$(basename "$file")
  echo -n "Adding index to $basename..."
  readelf_out=$(readelf -S "$file")
  if [[ $readelf_out =~ "gdb_index" ]]; then
    echo "already contains index. Skipped."
  else
    # 'save gdb-index' writes $DIRECTORY/$basename.gdb-index; objcopy then
    # embeds it into the ELF in place (input and output file are the same).
    gdb -batch "$file" -ex "save gdb-index $DIRECTORY" -ex "quit"
    objcopy --add-section .gdb_index="$DIRECTORY"/$basename.gdb-index \
      --set-section-flags .gdb_index=readonly "$file" "$file"
    echo "done."
  fi
done
diff --git a/media/webrtc/trunk/build/grit_action.gypi b/media/webrtc/trunk/build/grit_action.gypi
new file mode 100644
index 000000000..0f0d7798e
--- /dev/null
+++ b/media/webrtc/trunk/build/grit_action.gypi
@@ -0,0 +1,33 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+# grit_grd_file: string: grd file path
+# grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don’t currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
{
  'variables': {
    # Interpreter and script kept together so every action invokes grit the
    # same way.
    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
  },
  # Ask grit_info for the exact input/output sets so the build re-runs grit
  # whenever any file referenced by the .grd changes.
  'inputs': [
    '<!@pymod_do_main(grit_info <@(grit_defines) --inputs <(grit_grd_file))',
  ],
  'outputs': [
    '<!@pymod_do_main(grit_info <@(grit_defines) --outputs \'<(grit_out_dir)\' <(grit_grd_file))',
  ],
  'action': ['<@(grit_cmd)',
             '-i', '<(grit_grd_file)', 'build',
             '-fGRIT_DIR/../gritsettings/resource_ids',
             '-o', '<(grit_out_dir)',
             '<@(grit_defines)' ],
  # No cygwin shell is required to run this action on Windows.
  'msvs_cygwin_shell': 0,
  'message': 'Generating resources from <(grit_grd_file)',
}
diff --git a/media/webrtc/trunk/build/grit_target.gypi b/media/webrtc/trunk/build/grit_target.gypi
new file mode 100644
index 000000000..fe9900b54
--- /dev/null
+++ b/media/webrtc/trunk/build/grit_target.gypi
@@ -0,0 +1,30 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target that will have one or more
+# uses of grit_action.gypi. To use this the following variables need to be
+# defined:
+# grit_out_dir: string: the output directory path
+
+# NOTE: This file is optional, not all targets that use grit include it, some
+# do their own custom directives instead.
{
  'conditions': [
    # If the target is a direct binary, it needs to be able to find the header,
    # otherwise it is probably a supporting target just for grit so the include
    # dir needs to be set on anything that depends on this action.
    ['_type=="executable" or _type=="shared_library" or \
      _type=="loadable_module" or _type=="static_library"', {
      'include_dirs': [
        '<(grit_out_dir)',
      ],
    }, {
      # Non-linked targets: propagate the include dir to dependents instead.
      'direct_dependent_settings': {
        'include_dirs': [
          '<(grit_out_dir)',
        ],
      },
    }],
  ],
}
diff --git a/media/webrtc/trunk/build/gyp_chromium b/media/webrtc/trunk/build/gyp_chromium
new file mode 100755
index 000000000..d13403403
--- /dev/null
+++ b/media/webrtc/trunk/build/gyp_chromium
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+import glob
+import os
+import shlex
+import subprocess
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+# Add paths so that pymod_do_main(...) can import files.
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
+sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
+
+
+# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
+# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
+# seconds. Conversely, memory usage of build/gyp_chromium with Psyco
+# maxes out at about 158 MB vs. 132 MB without it.
+#
+# Psyco uses native libraries, so we need to load a different
+# installation depending on which OS we are running under. It has not
+# been tested whether using Psyco on our Mac and Linux builds is worth
+# it (the GYP running time is a lot shorter, so the JIT startup cost
+# may not be worth it).
# Try to enable the bundled Psyco JIT on Windows, falling back to no JIT
# when it is unavailable. Catch only ImportError: the previous bare
# 'except:' would also have swallowed KeyboardInterrupt and SystemExit.
if sys.platform == 'win32':
  try:
    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
    import psyco
  except ImportError:
    psyco = None
else:
  psyco = None
+
+def apply_gyp_environment(file_path=None):
+ """
+ Reads in a *.gyp_env file and applies the valid keys to os.environ.
+ """
+ if not file_path or not os.path.exists(file_path):
+ return
+ file_contents = open(file_path).read()
+ try:
+ file_data = eval(file_contents, {'__builtins__': None}, None)
+ except SyntaxError, e:
+ e.filename = os.path.abspath(file_path)
+ raise
+ supported_vars = ( 'CC',
+ 'CHROMIUM_GYP_FILE',
+ 'CHROMIUM_GYP_SYNTAX_CHECK',
+ 'CXX',
+ 'GYP_DEFINES',
+ 'GYP_GENERATOR_FLAGS',
+ 'GYP_GENERATOR_OUTPUT',
+ 'GYP_GENERATORS', )
+ for var in supported_vars:
+ val = file_data.get(var)
+ if val:
+ if var in os.environ:
+ print 'INFO: Environment value for "%s" overrides value in %s.' % (
+ var, os.path.abspath(file_path)
+ )
+ else:
+ os.environ[var] = val
+
def additional_include_files(args=()):
  """Returns a list of additional (.gypi) files to include, without
  duplicating ones that are already specified on the command line.

  Args:
    args: iterable of command-line arguments, scanned for '-I<path>' flags.
          (Default changed from a mutable [] to an immutable () -- the
          argument is only read, so behavior is unchanged.)
  """
  # Determine the include files specified on the command line.
  # This doesn't cover all the different option formats you can use,
  # but it's mainly intended to avoid duplicating flags on the automatic
  # makefile regeneration which only uses this format.
  specified_includes = set()
  for arg in args:
    if arg.startswith('-I') and len(arg) > 2:
      specified_includes.add(os.path.realpath(arg[2:]))

  result = []
  def AddInclude(path):
    # Compare realpaths so symlinked/relative duplicates are detected.
    if os.path.realpath(path) not in specified_includes:
      result.append(path)

  # Always include common.gypi.
  AddInclude(os.path.join(script_dir, 'common.gypi'))

  # Optionally add supplemental .gypi files if present.
  supplements = glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
  for supplement in supplements:
    AddInclude(supplement)

  return result
+
if __name__ == '__main__':
  args = sys.argv[1:]

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print "Enabled Psyco JIT."

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  #    http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    python_dir = os.path.join(chrome_src, 'third_party', 'python_26')
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    # Re-execute this same script under the bundled python and forward its
    # exit status.
    p = subprocess.Popen(
        [os.path.join(python_dir, 'python.exe')] + sys.argv,
        env=env, shell=False)
    p.communicate()
    sys.exit(p.returncode)

  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
    # Update the environment based on chromium.gyp_env
    gyp_env_path = os.path.join(os.path.dirname(chrome_src), 'chromium.gyp_env')
    apply_gyp_environment(gyp_env_path)

  # This could give false positives since it doesn't actually do real option
  # parsing. Oh well.
  gyp_file_specified = False
  for arg in args:
    if arg.endswith('.gyp'):
      gyp_file_specified = True
      break

  # If we didn't get a file, check an env var, and then fall back to
  # assuming 'all.gyp' from the same directory as the script.
  if not gyp_file_specified:
    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
    if gyp_file:
      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
      # path separators even on Windows due to the use of shlex.split().
      args.extend(shlex.split(gyp_file))
    else:
      args.append(os.path.join(script_dir, 'all.gyp'))

  # Add -I flags for common.gypi and any supplement.gypi files not already
  # specified on the command line.
  args.extend(['-I' + i for i in additional_include_files(args)])

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist. The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on the Mac, where a
  # violation of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option. http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if sys.platform not in ('darwin',):
    args.append('--no-circular-check')

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enforce syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  print 'Updating projects from gyp files...'
  sys.stdout.flush()

  # Off we go...
  sys.exit(gyp.main(args))
diff --git a/media/webrtc/trunk/build/install-build-deps-android.sh b/media/webrtc/trunk/build/install-build-deps-android.sh
new file mode 100755
index 000000000..0a90d3b10
--- /dev/null
+++ b/media/webrtc/trunk/build/install-build-deps-android.sh
@@ -0,0 +1,101 @@
#!/bin/bash -e

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Script to install everything needed to build chromium on android that
# requires sudo privileges.
# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions

# This script installs the sun-java6 packages (bin, jre and jdk). Sun requires
# a license agreement, so upon installation it will prompt the user. To get
# past the curses-based dialog press TAB <ret> TAB <ret> to agree.

# NOTE(review): this exits with status 0 on unsupported architectures --
# confirm callers do not depend on a nonzero status here.
if ! uname -m | egrep -q "i686|x86_64"; then
  echo "Only x86 architectures are currently supported" >&2
  exit
fi

if [ "x$(id -u)" != x0 ]; then
  echo "Running as non-root user."
  echo "You might have to enter your password one or more times for 'sudo'."
  echo
fi

# The temporary directory used to store output of update-java-alternatives
TEMPDIR=$(mktemp -d)
# Remove the scratch directory on any exit while preserving the original
# exit status.
cleanup() {
  local status=${?}
  trap - EXIT
  rm -rf "${TEMPDIR}"
  exit ${status}
}
trap cleanup EXIT

sudo apt-get update

# Fix deps
sudo apt-get -f install

# Install deps
# This step differs depending on what Ubuntu release we are running
# on since the package names are different, and Sun's Java must
# be installed manually on late-model versions.

# common
sudo apt-get -y install python-pexpect xvfb x11-utils

if /usr/bin/lsb_release -r -s | grep -q "12."; then
  # Ubuntu 12.x
  sudo apt-get -y install ant

  # Java can not be installed via ppa on Ubuntu 12.04+ so we'll
  # simply check to see if it has been setup properly -- if not
  # let the user know.

  if ! java -version 2>&1 | grep -q "Java(TM)"; then
    echo "****************************************************************"
    echo "You need to install the Oracle Java SDK from http://goo.gl/uPRSq"
    echo "and configure it as the default command-line Java environment."
    echo "****************************************************************"
    exit
  fi

else
  # Ubuntu 10.x

  sudo apt-get -y install ant1.8

  # Install sun-java6 stuff
  sudo apt-get -y install sun-java6-bin sun-java6-jre sun-java6-jdk

  # Switch version of Java to java-6-sun
  # Sun's java is missing certain Java plugins (e.g. for firefox, mozilla).
  # These are not required to build, and thus are treated only as warnings.
  # Any errors in updating java alternatives which are not '*-javaplugin.so'
  # will cause errors and stop the script from completing successfully.
  if ! sudo update-java-alternatives -s java-6-sun \
      >& "${TEMPDIR}"/update-java-alternatives.out
  then
    # Check that there are the expected javaplugin.so errors for the update
    if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
        /dev/null
    then
      # Print as warnings all the javaplugin.so errors
      echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
      grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
    fi
    # Check if there are any errors that are not javaplugin.so
    if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
        >& /dev/null
    then
      # If there are non-javaplugin.so errors, treat as errors and exit
      echo 'ERRORS: Failed to update alternatives for java-6-sun:'
      grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
      exit 1
    fi
  fi
fi

echo "install-build-deps-android.sh complete."
diff --git a/media/webrtc/trunk/build/install-build-deps.sh b/media/webrtc/trunk/build/install-build-deps.sh
new file mode 100755
index 000000000..b77e23a6d
--- /dev/null
+++ b/media/webrtc/trunk/build/install-build-deps.sh
@@ -0,0 +1,414 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
+# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+
# Print command-line help and terminate the script with a nonzero status.
usage() {
  printf '%s\n' \
      "Usage: $0 [--options]" \
      "Options:" \
      "--[no-]syms: enable or disable installation of debugging symbols" \
      "--[no-]lib32: enable or disable installation of 32 bit libraries" \
      "--no-prompt: silently select standard options/defaults" \
      "Script will prompt interactively if options not given."
  exit 1
}
+
# Parse command-line flags; any unrecognized argument prints usage and exits.
while test "$1" != ""
do
  case "$1" in
  --syms)      do_inst_syms=1;;
  --no-syms)   do_inst_syms=0;;
  --lib32)     do_inst_lib32=1;;
  --no-lib32)  do_inst_lib32=0;;
  --no-prompt) do_default=1
    # -qq --assume-yes makes the later apt-get runs non-interactive.
    do_quietly="-qq --assume-yes"
    ;;
  *) usage;;
  esac
  shift
done
+
# Refuse to run on distributions the package lists below do not cover.
if ! egrep -q \
    'Ubuntu (10\.04|10\.10|11\.04|11\.10|12\.04|lucid|maverick|natty|oneiric|precise)' \
    /etc/issue; then
  echo "Only Ubuntu 10.04 (lucid) through 12.04 (precise) are currently" \
      "supported" >&2
  exit 1
fi

# NOTE(review): unlike the distro check above, this exits with status 0 on
# unsupported architectures -- confirm whether that is intentional.
if ! uname -m | egrep -q "i686|x86_64"; then
  echo "Only x86 architectures are currently supported" >&2
  exit
fi

if [ "x$(id -u)" != x0 ]; then
  echo "Running as non-root user."
  echo "You might have to enter your password one or more times for 'sudo'."
  echo
fi
+
# Packages needed for chromeos only
chromeos_dev_list="libbluetooth-dev libpulse-dev"

# Packages needed for development
dev_list="apache2.2-bin bison curl elfutils fakeroot flex g++ gperf
          language-pack-fr libapache2-mod-php5 libasound2-dev libbz2-dev
          libcairo2-dev libcups2-dev libcurl4-gnutls-dev libdbus-glib-1-dev
          libelf-dev libgconf2-dev libgl1-mesa-dev libglib2.0-dev
          libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev
          libkrb5-dev libnspr4-dev libnss3-dev libpam0g-dev libsctp-dev
          libsqlite3-dev libssl-dev libudev-dev libwww-perl libxslt1-dev
          libxss-dev libxt-dev libxtst-dev mesa-common-dev patch
          perl php5-cgi pkg-config python python-cherrypy3 python-dev
          python-psutil rpm ruby subversion ttf-dejavu-core ttf-indic-fonts
          ttf-kochi-gothic ttf-kochi-mincho ttf-thai-tlwg wdiff git-core
          $chromeos_dev_list"

# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
# NaCl binaries. These are always needed, regardless of whether or not we want
# the full 32-bit "cross-compile" support (--lib32).
if [ "$(uname -m)" = "x86_64" ]; then
  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
fi

# Run-time libraries required by chromeos only
chromeos_lib_list="libpulse0 libbz2-1.0 libcurl4-gnutls-dev"

# Full list of required run-time libraries
lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcups2 libdbus-glib-1-2
          libexpat1 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
          libgtk2.0-0 libpam0g libpango1.0-0 libpcre3 libpixman-1-0
          libpng12-0 libstdc++6 libsqlite3-0 libudev0 libx11-6 libxau6 libxcb1
          libxcomposite1 libxcursor1 libxdamage1 libxdmcp6 libxext6 libxfixes3
          libxi6 libxinerama1 libxrandr2 libxrender1 libxtst6 zlib1g
          $chromeos_lib_list"

# Debugging symbols for all of the run-time libraries
dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libdbus-glib-1-2-dbg
          libfontconfig1-dbg libglib2.0-0-dbg libgtk2.0-0-dbg
          libpango1.0-0-dbg libpcre3-dbg libpixman-1-0-dbg
          libsqlite3-0-dbg
          libx11-6-dbg libxau6-dbg libxcb1-dbg libxcomposite1-dbg
          libxcursor1-dbg libxdamage1-dbg libxdmcp6-dbg libxext6-dbg
          libxfixes3-dbg libxi6-dbg libxinerama1-dbg libxrandr2-dbg
          libxrender1-dbg libxtst6-dbg zlib1g-dbg"

# Plugin lists needed for tests.
plugin_list="flashplugin-installer"

# Some package names have changed over time
if apt-cache show ttf-mscorefonts-installer >/dev/null 2>&1; then
  dev_list="${dev_list} ttf-mscorefonts-installer"
else
  dev_list="${dev_list} msttcorefonts"
fi
# NSPR/NSS packages were renamed between releases; install whichever flavor
# this release actually carries.
if apt-cache show libnspr4-dbg >/dev/null 2>&1; then
  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
  lib_list="${lib_list} libnspr4 libnss3"
else
  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
  lib_list="${lib_list} libnspr4-0d libnss3-1d"
fi
if apt-cache show libjpeg-dev >/dev/null 2>&1; then
  dev_list="${dev_list} libjpeg-dev"
else
  dev_list="${dev_list} libjpeg62-dev"
fi

# Some packages are only needed, if the distribution actually supports
# installing them.
if apt-cache show appmenu-gtk >/dev/null 2>&1; then
  lib_list="$lib_list appmenu-gtk"
fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase of lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
yes_no() {
  # In --no-prompt mode, skip reading input and return the provided default.
  if [ 0 -ne "${do_default-0}" ] ; then
    return $1
  fi
  local c
  while :; do
    # Put the terminal into no-echo, non-canonical mode (iuclc maps upper-
    # to lowercase) so a single keypress can be read; the trap restores the
    # terminal settings however the subshell exits. dd reads exactly one
    # byte and od renders it as hex (e.g. " 79" for 'y').
    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
         stty -echo iuclc -icanon 2>/dev/null
         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
    case "$c" in
      # 0x0a: Enter/newline -- accept the optional default in $1, if given.
      " 0a") if [ -n "$1" ]; then
               [ $1 -eq 0 ] && echo "Y" || echo "N"
               return $1
             fi
             ;;
      # 0x79: 'y' (or 'Y', thanks to iuclc above).
      " 79") echo "Y"
             return 0
             ;;
      # 0x6e: 'n' (or 'N').
      " 6e") echo "N"
             return 1
             ;;
      # Empty read (e.g. EOF / CTRL-D): abort the whole script.
      "") echo "Aborted" >&2
          exit 1
          ;;
      *) # The user pressed an unrecognized key. As we are not echoing
         # any incorrect user input, alert the user by ringing the bell.
         (tput bel) 2>/dev/null
         ;;
    esac
  done
}
+
# Decide whether to install debugging symbols; prompt only when no
# --syms/--no-syms flag was given on the command line.
if test "$do_inst_syms" = ""
then
  echo "This script installs all tools and libraries needed to build Chromium."
  echo ""
  echo "For most of the libraries, it can also install debugging symbols, which"
  echo "will allow you to debug code in the system libraries. Most developers"
  echo "won't need these symbols."
  echo -n "Do you want me to install them for you (y/N) "
  # Default answer (on plain Enter) is "no".
  if yes_no 1; then
    do_inst_syms=1
  fi
fi
if test "$do_inst_syms" = "1"; then
  echo "Installing debugging symbols."
else
  echo "Skipping installation of debugging symbols."
  # Clearing dbg_list drops all -dbg packages from the later install set.
  dbg_list=
fi
+
sudo apt-get update

# We initially run "apt-get" with the --reinstall option and parse its output.
# This way, we can find all the packages that need to be newly installed
# without accidentally promoting any packages from "auto" to "manual".
# We then re-run "apt-get" with just the list of missing packages.
echo "Finding missing packages..."
packages="${dev_list} ${lib_list} ${dbg_list} ${plugin_list}"
# Intentionally leaving $packages unquoted so it's more readable.
echo "Packages required: " $packages
echo
new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
# The dry run is cancelled by piping "n" to apt-get's confirmation prompt;
# LANG=C pins the output format the sed below parses.
if new_list="$(yes n | LANG=C $new_list_cmd)"; then
  # We probably never hit this following line.
  echo "No missing packages, and the packages are up-to-date."
elif [ $? -eq 1 ]; then
  # We expect apt-get to have exit status of 1.
  # This indicates that we cancelled the install with "yes n|".
  new_list=$(echo "$new_list" |
      sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d')
  new_list=$(echo "$new_list" | sed 's/ *$//')
  if [ -z "$new_list" ] ; then
    echo "No missing packages, and the packages are up-to-date."
  else
    echo "Installing missing packages: $new_list."
    sudo apt-get install ${do_quietly-} ${new_list}
  fi
  echo
else
  # An apt-get exit status of 100 indicates that a real error has occurred.

  # I am intentionally leaving out the '"'s around new_list_cmd,
  # as this makes it easier to cut and paste the output
  echo "The following command failed: " ${new_list_cmd}
  echo
  echo "It produces the following output:"
  yes n | $new_list_cmd || true
  echo
  echo "You will have to install the above packages yourself."
  echo
  exit 100
fi
+
# Install 32bit backwards compatibility support for 64bit systems
if [ "$(uname -m)" = "x86_64" ]; then
  if test "$do_inst_lib32" = ""
  then
    echo "We no longer recommend that you use this script to install"
    echo "32bit libraries on a 64bit system. Instead, consider using"
    echo "the install-chroot.sh script to help you set up a 32bit"
    echo "environment for building and testing 32bit versions of Chrome."
    echo
    echo "If you nonetheless want to try installing 32bit libraries"
    echo "directly, you can do so by explicitly passing the --lib32"
    echo "option to install-build-deps.sh."
  fi
  if test "$do_inst_lib32" != "1"
  then
    echo "Exiting without installing any 32bit libraries."
    exit 0
  fi

  echo "N.B. the code for installing 32bit libraries on a 64bit"
  echo "     system is no longer actively maintained and might"
  echo "     not work with modern versions of Ubuntu or Debian."
  echo

  # Standard 32bit compatibility libraries
  echo "First, installing the limited existing 32-bit support..."
  cmp_list="ia32-libs lib32asound2-dev lib32stdc++6 lib32z1
            lib32z1-dev libc6-dev-i386 libc6-i386 g++-multilib"
  if [ -n "`apt-cache search lib32readline-gplv2-dev 2>/dev/null`" ]; then
    cmp_list="${cmp_list} lib32readline-gplv2-dev"
  else
    cmp_list="${cmp_list} lib32readline5-dev"
  fi
  sudo apt-get install ${do_quietly-} $cmp_list

  # Set up a private apt configuration so the i386 package downloads below
  # do not disturb the host's package database or caches.
  tmp=/tmp/install-32bit.$$
  trap 'rm -rf "${tmp}"' EXIT INT TERM QUIT
  mkdir -p "${tmp}/apt/lists/partial" "${tmp}/cache" "${tmp}/partial"
  touch "${tmp}/status"

  [ -r /etc/apt/apt.conf ] && cp /etc/apt/apt.conf "${tmp}/apt/"
  cat >>"${tmp}/apt/apt.conf" <<EOF
        Apt::Architecture "i386";
        Dir::Cache "${tmp}/cache";
        Dir::Cache::Archives "${tmp}/";
        Dir::State::Lists "${tmp}/apt/lists/";
        Dir::State::status "${tmp}/status";
EOF

  # Download 32bit packages
  echo "Computing list of available 32bit packages..."
  sudo apt-get -c="${tmp}/apt/apt.conf" update

  echo "Downloading available 32bit packages..."
  sudo apt-get -c="${tmp}/apt/apt.conf" \
      --yes --download-only --force-yes --reinstall install \
      ${lib_list} ${dbg_list}

  # Open packages, remove everything that is not a library, move the
  # library to a lib32 directory and package everything as a *.deb file.
  echo "Repackaging and installing 32bit packages for use on 64bit systems..."
  for i in ${lib_list} ${dbg_list}; do
    orig="$(echo "${tmp}/${i}"_*_i386.deb)"
    compat="$(echo "${orig}" |
              sed -e 's,\(_[^_/]*_\)i386\(.deb\),-ia32\1amd64\2,')"
    rm -rf "${tmp}/staging"
    # The whole repackaging runs inside fakeroot so file ownership inside the
    # rebuilt .deb comes out as root without needing real root privileges.
    msg="$(fakeroot -u sh -exc '
      # Unpack 32bit Debian archive
      umask 022
      mkdir -p "'"${tmp}"'/staging/dpkg/DEBIAN"
      cd "'"${tmp}"'/staging"
      ar x "'${orig}'"
      tar zCfx dpkg data.tar.gz
      tar zCfx dpkg/DEBIAN control.tar.gz

      # Create a posix extended regular expression fragment that will
      # recognize the includes which have changed. Should be rare,
      # will almost always be empty.
      includes=`sed -n -e "s/^[0-9a-z]* //g" \
                       -e "\,usr/include/,p" dpkg/DEBIAN/md5sums |
                xargs -n 1 -I FILE /bin/sh -c \
                      "cmp -s dpkg/FILE /FILE || echo FILE" |
                tr "\n" "|" |
                sed -e "s,|$,,"`

      # If empty, set it to not match anything.
      test -z "$includes" && includes="^//"

      # Turn the conflicts into an extended RE for removal from the
      # Provides line.
      conflicts=`sed -n -e "/Conflicts/s/Conflicts: *//;T;s/, */|/g;p" \
                   dpkg/DEBIAN/control`

      # Rename package, change architecture, remove conflicts and dependencies
      sed -r -i \
          -e "/Package/s/$/-ia32/" \
          -e "/Architecture/s/:.*$/: amd64/" \
          -e "/Depends/s/:.*/: ia32-libs/" \
          -e "/Provides/s/($conflicts)(, *)?//g;T1;s/, *$//;:1" \
          -e "/Recommends/d" \
          -e "/Conflicts/d" \
        dpkg/DEBIAN/control

      # Only keep files that live in "lib" directories or the includes
      # that have changed.
      sed -r -i \
          -e "/\/lib64\//d" -e "/\/.?bin\//d" \
          -e "\,$includes,s,[ /]include/,&32/,g;s,include/32/,include32/,g" \
          -e "s, lib/, lib32/,g" \
          -e "s,/lib/,/lib32/,g" \
          -e "t;d" \
          -e "\,^/usr/lib32/debug\(.*/lib32\),s,^/usr/lib32/debug,/usr/lib/debug," \
        dpkg/DEBIAN/md5sums

      # Re-run ldconfig after installation/removal
      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xconfigure ]&&ldconfig||:"; } \
        >dpkg/DEBIAN/postinst
      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xremove ]&&ldconfig||:"; } \
        >dpkg/DEBIAN/postrm
      chmod 755 dpkg/DEBIAN/postinst dpkg/DEBIAN/postrm

      # Remove any other control files
      find dpkg/DEBIAN -mindepth 1 "(" -name control -o -name md5sums -o \
           -name postinst -o -name postrm ")" -o -print |
        xargs -r rm -rf

      # Remove any files/dirs that live outside of "lib" directories,
      # or are not in our list of changed includes.
      find dpkg -mindepth 1 -regextype posix-extended \
           "(" -name DEBIAN -o -name lib -o -regex "dpkg/($includes)" ")" \
           -prune -o -print | tac |
        xargs -r -n 1 sh -c "rm \$0 2>/dev/null || rmdir \$0 2>/dev/null || : "
      find dpkg -name lib64 -o -name bin -o -name "?bin" |
        tac | xargs -r rm -rf

      # Remove any symbolic links that were broken by the above steps.
      find -L dpkg -type l -print | tac | xargs -r rm -rf

      # Rename lib to lib32, but keep debug symbols in /usr/lib/debug/usr/lib32
      # That is where gdb looks for them.
      find dpkg -type d -o -path "*/lib/*" -print |
        xargs -r -n 1 sh -c "
          i=\$(echo \"\${0}\" |
               sed -e s,/lib/,/lib32/,g \
                   -e s,/usr/lib32/debug\\\\\(.*/lib32\\\\\),/usr/lib/debug\\\\1,);
          mkdir -p \"\${i%/*}\";
          mv \"\${0}\" \"\${i}\""

      # Rename include to include32.
      [ -d "dpkg/usr/include" ] && mv "dpkg/usr/include" "dpkg/usr/include32"

      # Prune any empty directories
      find dpkg -type d | tac | xargs -r -n 1 rmdir 2>/dev/null || :

      # Create our own Debian package
      cd ..
      dpkg --build staging/dpkg .' 2>&1)"
    compat="$(eval echo $(echo "${compat}" |
                          sed -e 's,_[^_/]*_amd64.deb,_*_amd64.deb,'))"
    [ -r "${compat}" ] || {
      echo "${msg}" >&2
      echo "Failed to build new Debian archive!" >&2
      exit 1
    }

    # Install failures here are tolerated (e.g. package already installed);
    # only the build step above is fatal.
    msg="$(sudo dpkg -i "${compat}" 2>&1)" && {
      echo "Installed ${compat##*/}"
    } || {
      # echo "${msg}" >&2
      echo "Skipped ${compat##*/}"
    }
  done

  # Add symbolic links for developing 32bit code
  echo "Adding missing symbolic links, enabling 32bit code development..."
  for i in $(find /lib32 /usr/lib32 -maxdepth 1 -name \*.so.\* |
             sed -e 's/[.]so[.][0-9].*/.so/' |
             sort -u); do
    [ "x${i##*/}" = "xld-linux.so" ] && continue
    [ -r "$i" ] && continue
    # Pick the highest-numbered existing .so.N and link the bare .so to it.
    j="$(ls "$i."* | sed -e 's/.*[.]so[.]\([^.]*\)$/\1/;t;d' |
         sort -n | tail -n 1)"
    [ -r "$i.$j" ] || continue
    sudo ln -s "${i##*/}.$j" "$i"
  done
fi
diff --git a/media/webrtc/trunk/build/install-chroot.sh b/media/webrtc/trunk/build/install-chroot.sh
new file mode 100755
index 000000000..d2c06fc39
--- /dev/null
+++ b/media/webrtc/trunk/build/install-chroot.sh
@@ -0,0 +1,809 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+ echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+ echo "-b dir additional directories that should be bind mounted,"
+ echo ' or "NONE".'
+ echo " Default: if local filesystems present, ask user for help"
+ echo "-g group,... groups that can use the chroot unauthenticated"
+ echo " Default: '${admin}' and current user's group ('$(id -gn)')"
+ echo "-l List all installed chroot environments"
+ echo "-m mirror an alternate repository mirror for package downloads"
+ echo "-s configure default deb-srcs"
+ echo "-c always copy 64bit helper binaries to 32bit chroot"
+ echo "-h this help message"
+}
+
+process_opts() {
+ local OPTNAME OPTIND OPTERR OPTARG
+ while getopts ":b:g:lm:sch" OPTNAME; do
+ case "$OPTNAME" in
+ b)
+ if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+ bind_mounts="${OPTARG}"
+ else
+ if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+ ! -d "${OPTARG}" ]; then
+ echo "Invalid -b option(s)"
+ usage
+ exit 1
+ fi
+ bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+ fi
+ ;;
+ g)
+ [ -n "${OPTARG}" ] &&
+ chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+ ;;
+ l)
+ list_all_chroots
+ exit
+ ;;
+ m)
+ if [ -n "${mirror}" ]; then
+ echo "You can only specify exactly one mirror location"
+ usage
+ exit 1
+ fi
+ mirror="$OPTARG"
+ ;;
+ s)
+ add_srcs="y"
+ ;;
+ c)
+ copy_64="y"
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ \:)
+ echo "'-$OPTARG' needs an argument."
+ usage
+ exit 1
+ ;;
+ *)
+ echo "invalid command-line option: $OPTARG"
+ usage
+ exit 1
+ ;;
+ esac
+ done
+
+ if [ $# -ge ${OPTIND} ]; then
+ eval echo "Unexpected command line argument: \${${OPTIND}}"
+ usage
+ exit 1
+ fi
+}
+
+list_all_chroots() {
+ for i in /var/lib/chroot/*; do
+ i="${i##*/}"
+ [ "${i}" = "*" ] && continue
+ [ -x "/usr/local/bin/${i%bit}" ] || continue
+ grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+ [ -r "/etc/schroot/script-${i}" -a \
+ -r "/etc/schroot/mount-${i}" ] || continue
+ echo "${i%bit}"
+ done
+}
+
+getkey() {
+ (
+ trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+ stty -echo iuclc -icanon 2>/dev/null
+ dd count=1 bs=1 2>/dev/null
+ )
+}
+
+chr() {
+ printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+ printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
+
+is_network_drive() {
+ stat -c %T -f "$1/" 2>/dev/null |
+ egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+ echo "Run this script as a regular user and provide your \"sudo\"" \
+ "password if requested" >&2
+ exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+ [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+ ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+ printf '%4d: %s\n' "$j" "$i"
+ j=$(($j+1))
+done
+while :; do
+ printf "Which target would you like to install: "
+ read n
+ [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+ [ "$j" -eq "$n" ] && { distname="$i"; break; }
+ j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+ while :; do
+ echo "You are running a 64bit kernel. This allows you to install either a"
+ printf "32bit or a 64bit chroot environment. %s" \
+ "Which one do you want (32, 64) "
+ read arch
+ [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+ done
+ [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+ arch="${arch}bit"
+ echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+ while :; do
+ echo "This chroot already exists on your machine."
+ if schroot -l --all-sessions 2>&1 |
+ sed 's/^session://' |
+ grep -qs "^${target%bit}-"; then
+ echo "And it appears to be in active use. Terminate all programs that"
+ echo "are currently using the chroot environment and then re-run this"
+ echo "script."
+ echo "If you still get an error message, you might have stale mounts"
+ echo "that you forgot to delete. You can always clean up mounts by"
+ echo "executing \"${target%bit} -c\"."
+ exit 1
+ fi
+ echo "I can abort installation, I can overwrite the existing chroot,"
+ echo "or I can delete the old one and then exit. What would you like to"
+ printf "do (a/o/d)? "
+ read choice
+ case "${choice}" in
+ a|A) exit 1;;
+ o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+ d|D) sudo rm -rf "/var/lib/chroot/${target}" \
+ "/usr/local/bin/${target%bit}" \
+ "/etc/schroot/mount-${target}" \
+ "/etc/schroot/script-${target}"
+ sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+ :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+ "/etc/schroot/schroot.conf"
+ trap '' INT TERM QUIT HUP
+ trap '' EXIT
+ echo "Deleted!"
+ exit 0;;
+ esac
+ done
+ echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+ while :; do
+ echo "Would you like to add ${distname}-updates and ${distname}-security "
+ printf "to the chroot's sources.list (y/n)? "
+ read alt_repos
+ case "${alt_repos}" in
+ y|Y)
+ alt_repos="y"
+ break
+ ;;
+ n|N)
+ break
+ ;;
+ esac
+ done
+ echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+ mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+ $2 !~ "^/media" && $2 !~ "^/run" &&
+ ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+ $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+ $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+ $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+ print $2
+ }' /proc/mounts |
+ head -n26)"
+ if [ -n "${mounts}" ]; then
+ echo "You appear to have non-standard mount points that you"
+ echo "might want to import into the chroot environment:"
+ echo
+ sel=
+ while :; do
+ # Print a menu, listing all non-default mounts of local or network
+ # file systems.
+ j=1; for m in ${mounts}; do
+ c="$(printf $(printf '\\%03o' $((64+$j))))"
+ echo "$sel" | grep -qs $c &&
+ state="mounted in chroot" || state="$(tput el)"
+ printf " $c) %-40s${state}\n" "$m"
+ j=$(($j+1))
+ done
+ # Allow user to interactively (de-)select any of the entries
+ echo
+ printf "Select mount points that you want to be included or press %s" \
+ "SPACE to continue"
+ c="$(getkey | tr a-z A-Z)"
+ [ "$c" == " " ] && { echo; echo; break; }
+ if [ -z "$c" ] ||
+ [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+ # Invalid input, ring the console bell
+ tput bel
+ else
+ # Toggle the selection for the given entry
+ if echo "$sel" | grep -qs $c; then
+ sel="$(printf "$sel" | sed "s/$c//")"
+ else
+ sel="$sel$c"
+ fi
+ fi
+ # Reposition cursor to the top of the list of entries
+ tput cuu $(($j + 1))
+ echo
+ done
+ fi
+ j=1; for m in ${mounts}; do
+ c="$(chr $(($j + 64)))"
+ if echo "$sel" | grep -qs $c; then
+ bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+ fi
+ j=$(($j+1))
+ done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+ /etc/schroot/schroot.conf
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+ mirror="http://archive.ubuntu.com/ubuntu" ||
+ mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+ "${distname}" "/var/lib/chroot/${target}" "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+ brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+ chroot_groups="${admin},$(id -gn)"
+fi
+# Older versions of schroot wanted a "priority=" line, whereas recent
+# versions deprecate "priority=" and warn if they see it. We don't have
+# a good feature test, but scanning for the string "priority=" in the
+# existing "schroot.conf" file is a good indication of what to do.
+priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+ echo 'priority=3' || :)
+sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+# Set up a list of mount points that is specific to this
+# chroot environment.
+sed '/^FSTAB=/s,"[^"]*","/etc/schroot/mount-'"${target}"'",' \
+ /etc/schroot/script-defaults |
+ sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+ /etc/schroot/mount-defaults |
+ sudo sh -c 'cat > /etc/schroot/mount-'"${target}"
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+ printf "${bind_mounts}" |
+ sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+ ! grep -qs '^/media' /etc/schroot/mount-"${target}"; then
+ echo '/media /media none rw,rbind 0 0' |
+ sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+
+# Share /dev/shm and possibly /run/shm
+grep -qs '^/dev/shm' /etc/schroot/mount-"${target}" ||
+ echo '/dev/shm /dev/shm none rw,bind 0 0' |
+ sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+if [ -d "/var/lib/chroot/${target}/run" ] &&
+ ! grep -qs '^/run/shm' /etc/schroot/mount-"${target}"; then
+ { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+ echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+ sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+ sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+ # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+ # insert the same number of spaces as the number of characters in the
+ # parameter(s) passed to this function.
+ # If the "fold" program cannot be found, or if the actual width of the
+ # terminal cannot be determined, this function doesn't attempt to do any
+ # wrapping.
+ local f="$(type -P fold)"
+ [ -z "${f}" ] && { cat; return; }
+ local c="$(stty -a </dev/tty 2>/dev/null |
+ sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+ [ -z "${c}" ] && { cat; return; }
+ local i="$(echo "$*"|sed 's/./ /g')"
+ local j="$(printf %s "${i}"|wc -c)"
+ if [ "${c}" -gt "${j}" ]; then
+ dd bs=1 count="${j}" 2>/dev/null
+ "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+ else
+ "${f}" -sw "${c}"
+ fi
+}
+
+help() {
+ echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+ echo " help: print this message" | wrap " "
+ echo " list: list all known chroot environments" | wrap " "
+ echo " clean: remove all old chroot sessions for \"${chroot}\"" | wrap " "
+ echo " clean-all: remove all old chroot sessions for all environments" | wrap " "
+ exit 0
+}
+
+clean() {
+ local s t rc
+ rc=0
+ for s in $(schroot -l --all-sessions); do
+ if [ -n "$1" ]; then
+ t="${s#session:}"
+ [ "${t#${chroot}-}" == "${t}" ] && continue
+ fi
+ if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+ fgrep -qs "/var/lib/schroot/mount/${t}"; then
+ echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+ rc=1
+ continue
+ fi
+ sudo schroot -c "${s}" -e || rc=1
+ done
+ exit ${rc}
+}
+
+list() {
+ for e in $(schroot -l); do
+ e="${e#chroot:}"
+ [ -x "/usr/local/bin/${e}" ] || continue
+ if schroot -l --all-sessions 2>/dev/null |
+ sed 's/^session://' |
+ grep -qs "^${e}-"; then
+ echo "${e} is currently active"
+ else
+ echo "${e}"
+ fi
+ done
+ exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+ case "$1" in
+ --) shift; break;;
+ -h|--help) shift; help;;
+ -l|--list) shift; list;;
+ -c|--clean) shift; clean "${chroot}";;
+ -C|--clean-all) shift; clean;;
+ *) break;;
+ esac
+done
+
+session="$(schroot -c "${chroot}" -b)"
+
+if [ $# -eq 0 ]; then
+ schroot -c "${session}" -r -p
+else
+ p="$1"; shift
+ schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+ awk '{ print $1 }') 2>/dev/null
+while [ -n "$i" ]; do
+ pids=$(ls -id1 /proc/*/root/. 2>/dev/null |
+ sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+ t
+ d') >/dev/null 2>&1
+ [ -z "$pids" ] && break
+ kill -9 $pids
+done
+schroot -c "${session}" -e
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+ s/^\(deb .* [^ -]\+\) main/\1-security main/
+ p
+ t1
+ d
+ :1;s/-security main/-updates main/
+ t
+ d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+ "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+ HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+ >&/dev/null; then
+ sudo sh -c '
+ echo "deb http://archive.canonical.com/ubuntu" \
+ "'"${distname}"' partner" \
+ >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+ s/^deb\([^-]\)/deb-src\1/' \
+ "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+ sudo sh -c '
+ echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+ >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+ apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+ [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+ sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+ l='"${LANG:-en_US}"'; l="${l%%.*}"
+ [ -r /etc/locale.gen ] &&
+ sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+ locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+ [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+ sudo sed -i 's/ / [arch=amd64,i386] /' \
+ "/var/lib/chroot/${target}/etc/apt/sources.list"
+ [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+ echo foreign-architecture \
+ $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+ sudo sh -c "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+ egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+ echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install \
+ autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool \
+ strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+ cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+ "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+ file /bin/bash 2>/dev/null | grep -q x86-64; then
+ readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+ 'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+ sudo "/usr/local/bin/${target%bit}" apt-get -y install \
+ lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1
+ dep=
+ for i in binutils gdb; do
+ [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+ done
+ [ -n "$dep" ] && sudo apt-get -y install $dep
+ sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+ for i in libbfd libpython; do
+ lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+ grep -s "$i" | awk '{ print $3 }')"
+ if [ -n "$lib" -a -r "$lib" ]; then
+ sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+ fi
+ done
+ for lib in libssl libcrypt; do
+ for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+ sudo cp $path/$lib* \
+ "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+ done
+ done
+ for i in gdb ld; do
+ sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+ sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+ /usr/local/lib/amd64/$i "\$@"
+EOF
+ sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+ done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+ while :; do
+ echo
+ echo "If you plan on building Chrome inside of the new chroot environment,"
+ echo "you now have to install the build dependencies. Do you want me to"
+ printf "start the script that does this for you (y/n)? "
+ read install_deps
+ case "${install_deps}" in
+ y|Y)
+ echo
+ # We prefer running the script in-place, but this might not be
+ # possible, if it lives on a network filesystem that denies
+ # access to root.
+ tmp_script=
+ if ! sudo /usr/local/bin/"${target%bit}" \
+ sh -c "[ -x '${script}' ]" >&/dev/null; then
+ tmp_script="/tmp/${script##*/}"
+ cp "${script}" "${tmp_script}"
+ fi
+ # Some distributions automatically start an instance of the system-
+ # wide dbus daemon, cron daemon or of the logging daemon, when
+          # installing the Chrome build dependencies. This prevents the chroot
+ # session from being closed. So, we always try to shut down any running
+ # instance of dbus and rsyslog.
+ sudo /usr/local/bin/"${target%bit}" sh -c "${script} --no-lib32;
+ rc=$?;
+ /etc/init.d/cron stop >/dev/null 2>&1 || :;
+ /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+ /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+ exit $rc"
+ rc=$?
+ [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+ [ $rc -ne 0 ] && exit $rc
+ break
+ ;;
+ n|N)
+ break
+ ;;
+ esac
+ done
+ echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mounts}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+ ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+ { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+ is_network_drive "${HOME}/chroot"; } &&
+ ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+ echo "${HOME}/chroot is currently located on the same device as your"
+ echo "home directory."
+ echo "This might not be what you want. Do you want me to move it somewhere"
+ echo "else?"
+ # If the computer has multiple spindles, many users configure all or part of
+ # the secondary hard disk to be writable by the primary user of this machine.
+ # Make some reasonable effort to detect this type of configuration and
+ # then offer a good location for where to put the ~/chroot directory.
+ suggest=
+ for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+ if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+ ! is_network_drive "$i"; then
+ suggest="$i"
+ else
+ for j in "$i/"*; do
+ if [ -d "$j" -a -w "$j" -a \
+ \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+ ! is_network_drive "$j"; then
+ suggest="$j"
+ else
+ for k in "$j/"*; do
+ if [ -d "$k" -a -w "$k" -a \
+ \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+ ! is_network_drive "$k"; then
+ suggest="$k"
+ break
+ fi
+ done
+ fi
+ [ -n "${suggest}" ] && break
+ done
+ fi
+ [ -n "${suggest}" ] && break
+ done
+ def_suggest="${HOME}"
+ if [ -n "${suggest}" ]; then
+ # For home directories that reside on network drives, make our suggestion
+ # the default option. For home directories that reside on a local drive,
+ # require that the user manually enters the new location.
+ if is_network_drive "${HOME}"; then
+ def_suggest="${suggest}"
+ else
+ echo "A good location would probably be in \"${suggest}\""
+ fi
+ fi
+ while :; do
+ printf "Physical location [${def_suggest}]: "
+ read dir
+ [ -z "${dir}" ] && dir="${def_suggest}"
+ [ "${dir%%/}" == "${HOME%%/}" ] && break
+ if ! [ -d "${dir}" -a -w "${dir}" ] ||
+ [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+ echo "Cannot write to ${dir}/chroot. Please try again"
+ else
+ mv "${HOME}/chroot" "${dir}/chroot"
+ ln -s "${dir}/chroot" "${HOME}/chroot"
+ for i in $(list_all_chroots); do
+ sudo "$i" mkdir -p "${dir}/chroot"
+ done
+ sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+ break
+ fi
+ done
+fi
+
+# Clean up package files
+sudo schroot -c /usr/local/bin/"${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/media/webrtc/trunk/build/internal/README.chromium b/media/webrtc/trunk/build/internal/README.chromium
new file mode 100644
index 000000000..4624830d2
--- /dev/null
+++ b/media/webrtc/trunk/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+ essential.vsprops
+ Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+ release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting which is "Maximize Speed". Results in relatively fast build with reasonable optimization level but without whole program optimization to reduce build time.
+
+ release_impl.vsprops
+ Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+ release_impl_checksenabled.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+ release_impl_official.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimizations (WPO), which doubles the build time. Results in much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+ release_impl_pgo_instrument.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+ release_impl_pgo_optimize.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+ release_impl_purify.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/media/webrtc/trunk/build/internal/release_defaults.gypi b/media/webrtc/trunk/build/internal/release_defaults.gypi
new file mode 100644
index 000000000..1bf674ac1
--- /dev/null
+++ b/media/webrtc/trunk/build/internal/release_defaults.gypi
@@ -0,0 +1,18 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true',
+ },
+ 'VCLinkerTool': {
+ # No incremental linking.
+ 'LinkIncremental': '1',
+ # Eliminate Unreferenced Data (/OPT:REF).
+ 'OptimizeReferences': '2',
+ # Folding on (/OPT:ICF).
+ 'EnableCOMDATFolding': '2',
+ },
+ },
+}
diff --git a/media/webrtc/trunk/build/internal/release_impl.gypi b/media/webrtc/trunk/build/internal/release_impl.gypi
new file mode 100644
index 000000000..5ac0e09d1
--- /dev/null
+++ b/media/webrtc/trunk/build/internal/release_impl.gypi
@@ -0,0 +1,17 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'includes': ['release_defaults.gypi'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'OmitFramePointers': 'false',
+ # The above is not sufficient (http://crbug.com/106711): it
+ # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+ # perform FPO regardless, so we must explicitly disable.
+ # We still want the false setting above to avoid having
+ # "/Oy /Oy-" and warnings about overriding.
+ 'AdditionalOptions': ['/Oy-'],
+ },
+ },
+}
diff --git a/media/webrtc/trunk/build/internal/release_impl_official.gypi b/media/webrtc/trunk/build/internal/release_impl_official.gypi
new file mode 100644
index 000000000..d084ae32c
--- /dev/null
+++ b/media/webrtc/trunk/build/internal/release_impl_official.gypi
@@ -0,0 +1,43 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'includes': ['release_defaults.gypi'],
+ 'defines': ['OFFICIAL_BUILD'],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'InlineFunctionExpansion': '2',
+ 'EnableIntrinsicFunctions': 'true',
+ 'EnableFiberSafeOptimizations': 'true',
+ 'OmitFramePointers': 'false',
+ # The above is not sufficient (http://crbug.com/106711): it
+ # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+ # perform FPO regardless, so we must explicitly disable.
+ # We still want the false setting above to avoid having
+ # "/Oy /Oy-" and warnings about overriding.
+ 'AdditionalOptions': ['/Oy-'],
+ },
+ 'VCLibrarianTool': {
+ 'AdditionalOptions': [
+ '/ltcg',
+ '/expectedoutputsize:120000000'
+ ],
+ },
+ 'VCLinkerTool': {
+ 'AdditionalOptions': [
+ '/time',
+ # This may reduce memory fragmentation during linking.
+ # The expected size is 40*1024*1024, which gives us about 10M of
+ # headroom as of Dec 16, 2011.
+ '/expectedoutputsize:41943040',
+ ],
+ 'LinkTimeCodeGeneration': '1',
+ # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to
+ # the generated PDB. According to MSDN documentation, this flag is only
+ # available (or perhaps supported) in the Enterprise (team development)
+ # version of Visual Studio. If this blocks your official build, simply
+ # comment out this line, then re-run "gclient runhooks".
+ 'Profile': 'true',
+ },
+ },
+}
diff --git a/media/webrtc/trunk/build/ios/clean_env.py b/media/webrtc/trunk/build/ios/clean_env.py
new file mode 100755
index 000000000..548e2b92e
--- /dev/null
+++ b/media/webrtc/trunk/build/ios/clean_env.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+ """This is like 'env -i', but it uses a whitelist of env variables to allow
+ through to the command being run. It attempts to strip off Xcode-added
+ values from PATH.
+ """
+ # Note: An attempt was made to do something like: env -i bash -lc '[command]'
+ # but that fails to set the things set by login (USER, etc.), so instead
+ # the only approach that seems to work is to have a whitelist.
+ env_key_whitelist = (
+ 'HOME',
+ 'LOGNAME',
+ # 'PATH' added below (but filtered).
+ 'PWD',
+ 'SHELL',
+ 'TEMP',
+ 'TMPDIR',
+ 'USER'
+ )
+
+ # Need something to run.
+ # TODO(lliabraa): Make this output a usage string and exit (here and below).
+ assert(len(argv) > 0)
+
+ add_to_path = [];
+ first_entry = argv[0];
+ if first_entry.startswith('ADD_TO_PATH='):
+ argv = argv[1:];
+ add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
+
+ # Still need something to run.
+ assert(len(argv) > 0)
+
+ clean_env = {}
+
+ # Pull over the whitelisted keys.
+ for key in env_key_whitelist:
+ val = os.environ.get(key, None)
+ if not val is None:
+ clean_env[key] = val
+
+ # Collect the developer dir as set via Xcode, defaulting it.
+ dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+ if dev_prefix[-1:] != '/':
+ dev_prefix += '/'
+
+ # Now pull in PATH, but remove anything Xcode might have added.
+ initial_path = os.environ.get('PATH', '')
+ filtered_chunks = \
+ [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+ if filtered_chunks:
+ clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+ # Add any KEY=VALUE args before the command to the cleaned environment.
+ args = argv[:]
+ while '=' in args[0]:
+ (key, val) = args[0].split('=', 1)
+ clean_env[key] = val
+ args = args[1:]
+
+ # Still need something to run.
+ assert(len(args) > 0)
+
+ # Off it goes...
+ os.execvpe(args[0], args, clean_env)
+ # Should never get here, so return a distinctive, non-zero status code.
+ return 66
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/media/webrtc/trunk/build/ios/mac_build.gypi b/media/webrtc/trunk/build/ios/mac_build.gypi
new file mode 100644
index 000000000..9a739182b
--- /dev/null
+++ b/media/webrtc/trunk/build/ios/mac_build.gypi
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Xcode throws an error if an iOS target depends on a Mac OS X target. So
+# any place a utility program needs to be built and run, an action is
+# used to run ninja as a script to work around this.
+# Example:
+# {
+# 'target_name': 'foo',
+# 'type': 'none',
+# 'variables': {
+# # The name of a directory used for ninja. This cannot be shared with
+# # another mac build.
+# 'ninja_output_dir': 'ninja-foo',
+# # The list of all the gyp files that contain the targets to run.
+# 're_run_targets': [
+# 'foo.gyp',
+# ],
+# },
+# 'includes': ['path_to/mac_build.gypi'],
+# 'actions': [
+# {
+# 'action_name': 'compile foo',
+# 'inputs': [],
+# 'outputs': [],
+# 'action': [
+# '<@(ninja_cmd)',
+# # All the targets to build.
+# 'foo1',
+# 'foo2',
+# ],
+# },
+# ],
+# }
+{
+ 'variables': {
+ # Convenience variable pointing to the ninja product directory.
+ 'ninja_product_dir':
+ '<(DEPTH)/xcodebuild/<(ninja_output_dir)/<(CONFIGURATION_NAME)',
+
+ # Common ninja command line flags.
+ 'ninja_cmd': [
+ # Bounce through clean_env to clean up the environment so things
+ # set by the iOS build don't pollute the Mac build.
+ '<(DEPTH)/build/ios/clean_env.py',
+ # ninja must be found in the PATH.
+ 'ADD_TO_PATH=<!(echo $PATH)',
+ 'ninja',
+ '-C',
+ '<(ninja_product_dir)',
+ ],
+
+ # Common syntax to rerun gyp to generate the Mac projects.
+ 're_run_gyp': [
+ 'build/gyp_chromium',
+ # Don't use anything set for the iOS side of things.
+ '--ignore-environment',
+ # Generate for ninja
+ '--format=ninja',
+ # Generate files into xcodebuild/ninja
+ '-Goutput_dir=xcodebuild/<(ninja_output_dir)',
+ # nacl isn't in the iOS checkout, make sure it's turned off
+ '-Ddisable_nacl=1',
+ # Add a variable to handle specific cases for mac_build.
+ '-Dios_mac_build=1',
+ # Pass through the Mac SDK version.
+ '-Dmac_sdk=<(mac_sdk)',
+ ],
+
+ # Rerun gyp for each of the projects needed. This is what actually
+ # generates the projects on disk.
+ 're_run_gyp_execution':
+ '<!(cd <(DEPTH) && <@(re_run_gyp) <@(re_run_targets))',
+ },
+ # Since these are used to generate things needed by other targets, make
+ # them hard dependencies so they are always built first.
+ 'hard_dependency': 1,
+}
diff --git a/media/webrtc/trunk/build/jar_file_jni_generator.gypi b/media/webrtc/trunk/build/jar_file_jni_generator.gypi
new file mode 100644
index 000000000..3c14cf89c
--- /dev/null
+++ b/media/webrtc/trunk/build/jar_file_jni_generator.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for system Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'android_jar_jni_headers',
+# 'type': 'none',
+# 'variables': {
+# 'jni_gen_dir': 'chrome',
+# 'input_java_class': 'java/io/InputStream.class',
+# 'input_jar_file': '<(android_sdk)/android.jar',
+# },
+# 'includes': [ '../build/jar_file_jni_generator.gypi' ],
+# },
+
+{
+ 'variables': {
+ 'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+ },
+ 'actions': [
+ {
+ 'action_name': 'generate_jni_headers_from_jar_file',
+ 'inputs': [
+ '<(jni_generator)',
+ '<(input_jar_file)',
+ ],
+ 'variables': {
+ 'java_class_name': '<!(basename <(input_java_class)|sed "s/\.class//")'
+ },
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni/<(java_class_name)_jni.h',
+ ],
+ 'action': [
+ '<(jni_generator)',
+ '-j',
+ '<(input_jar_file)',
+ '--input_file',
+ '<(input_java_class)',
+ '--output_dir',
+ '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni',
+ ],
+ 'message': 'Generating JNI bindings from <(input_jar_file)/<(input_java_class)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ # This target exports a hard dependency because it generates header
+ # files.
+ 'hard_dependency': 1,
+}
diff --git a/media/webrtc/trunk/build/java.gypi b/media/webrtc/trunk/build/java.gypi
new file mode 100644
index 000000000..56af2e117
--- /dev/null
+++ b/media/webrtc/trunk/build/java.gypi
@@ -0,0 +1,90 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'my-package_java',
+# 'type': 'none',
+# 'variables': {
+# 'package_name': 'my-package',
+# 'java_in_dir': 'path/to/package/root',
+# },
+# 'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The generated jar-file will be:
+# <(PRODUCT_DIR)/lib.java/chromium_<(package_name).jar
+# Required variables:
+# package_name - Used to name the intermediate output directory and in the
+# names of some output files.
+# java_in_dir - The top-level java directory. The src should be in
+# <java_in_dir>/src.
+# Optional/automatic variables:
+# additional_input_paths - These paths will be included in the 'inputs' list to
+# ensure that this target is rebuilt when one of these paths changes.
+# additional_src_dirs - Additional directories with .java files to be compiled
+# and included in the output of this target.
+# generated_src_dirs - Same as additional_src_dirs except used for .java files
+# that are generated at build time. This should be set automatically by a
+# target's dependencies. The .java files in these directories are not
+# included in the 'inputs' list (unlike additional_src_dirs).
+# input_jars_paths - The path to jars to be included in the classpath. This
+# should be filled automatically by depending on the appropriate targets.
+
+{
+ 'dependencies': [
+ '<(DEPTH)/build/build_output_dirs_android.gyp:build_output_dirs'
+ ],
+ # This all_dependent_settings is used for java targets only. This will add the
+ # chromium_<(package_name) jar to the classpath of dependent java targets.
+ 'all_dependent_settings': {
+ 'variables': {
+ 'input_jars_paths': ['<(PRODUCT_DIR)/lib.java/chromium_<(package_name).jar'],
+ },
+ },
+ 'variables': {
+ 'input_jars_paths': [],
+ 'additional_src_dirs': [],
+ 'additional_input_paths': [],
+ 'generated_src_dirs': [],
+ },
+ 'actions': [
+ {
+ 'action_name': 'ant_<(package_name)',
+ 'message': 'Building <(package_name) java sources.',
+ 'inputs': [
+ 'android/ant/common.xml',
+ 'android/ant/chromium-jars.xml',
+ '>!@(find >(java_in_dir) >(additional_src_dirs) -name "*.java")',
+ '>@(input_jars_paths)',
+ '>@(additional_input_paths)',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/lib.java/chromium_<(package_name).jar',
+ ],
+ 'action': [
+ 'ant',
+ '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+ '-DANDROID_SDK=<(android_sdk)',
+ '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+ '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+ '-DANDROID_SDK_VERSION=<(android_sdk_version)',
+ '-DANDROID_GDBSERVER=<(android_gdbserver)',
+ '-DPRODUCT_DIR=<(ant_build_out)',
+
+ '-DADDITIONAL_SRC_DIRS=>(additional_src_dirs)',
+ '-DGENERATED_SRC_DIRS=>(generated_src_dirs)',
+ '-DINPUT_JARS_PATHS=>(input_jars_paths)',
+ '-DPACKAGE_NAME=<(package_name)',
+
+ '-Dbasedir=<(java_in_dir)',
+ '-buildfile',
+ '<(DEPTH)/build/android/ant/chromium-jars.xml'
+ ]
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/java_aidl.gypi b/media/webrtc/trunk/build/java_aidl.gypi
new file mode 100644
index 000000000..9833256a4
--- /dev/null
+++ b/media/webrtc/trunk/build/java_aidl.gypi
@@ -0,0 +1,58 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java aidl files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'aidl_aidl-file-name',
+# 'type': 'none',
+# 'variables': {
+# 'package_name': <name-of-package>
+# 'aidl_interface_file': '<interface-path>/<interface-file>.aidl',
+# },
+#    'sources': [
+#      '<input-path1>/<input-file1>.aidl',
+#      '<input-path2>/<input-file2>.aidl',
+#      ...
+#    ],
+# 'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }
+#
+#
+# The generated java files will be:
+# <(PRODUCT_DIR)/lib.java/<input-file1>.java
+# <(PRODUCT_DIR)/lib.java/<input-file2>.java
+# ...
+#
+# TODO(cjhopman): dependents need to rebuild when this target's inputs have changed.
+
+{
+ 'direct_dependent_settings': {
+ 'variables': {
+ 'generated_src_dirs': ['<(SHARED_INTERMEDIATE_DIR)/<(package_name)/aidl/'],
+ },
+ },
+ 'rules': [
+ {
+ 'rule_name': 'compile_aidl',
+ 'extension': 'aidl',
+ 'inputs': [
+ '<(android_sdk)/framework.aidl',
+ '<(aidl_interface_file)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(package_name)/aidl/<(RULE_INPUT_ROOT).java',
+ ],
+ 'action': [
+ '<(android_sdk_tools)/aidl',
+ '-p<(android_sdk)/framework.aidl',
+ '-p<(aidl_interface_file)',
+ '<(RULE_INPUT_PATH)',
+ '<(SHARED_INTERMEDIATE_DIR)/<(package_name)/aidl/<(RULE_INPUT_ROOT).java',
+ ],
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/jni_generator.gypi b/media/webrtc/trunk/build/jni_generator.gypi
new file mode 100644
index 000000000..837d9ab19
--- /dev/null
+++ b/media/webrtc/trunk/build/jni_generator.gypi
@@ -0,0 +1,58 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'base_jni_headers',
+# 'type': 'none',
+# 'sources': [
+# 'android/java/src/org/chromium/base/BuildInfo.java',
+# ...
+# ...
+# 'android/java/src/org/chromium/base/SystemMessageHandler.java',
+# ],
+# 'variables': {
+# 'jni_gen_dir': 'base',
+# },
+# 'includes': [ '../build/jni_generator.gypi' ],
+# },
+#
+# The generated file name pattern can be seen on the "outputs" section below.
+# (note that RULE_INPUT_ROOT is the basename for the java file).
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+
+{
+ 'variables': {
+ 'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+ },
+ 'rules': [
+ {
+ 'rule_name': 'generate_jni_headers',
+ 'extension': 'java',
+ 'inputs': [
+ '<(jni_generator)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni/<(RULE_INPUT_ROOT)_jni.h',
+ ],
+ 'action': [
+ '<(jni_generator)',
+ '--input_file',
+ '<(RULE_INPUT_PATH)',
+ '--output_dir',
+ '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_dir)/jni',
+ ],
+ 'message': 'Generating JNI bindings from <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ # This target exports a hard dependency because it generates header
+ # files.
+ 'hard_dependency': 1,
+}
diff --git a/media/webrtc/trunk/build/json_schema_bundle_compile.gypi b/media/webrtc/trunk/build/json_schema_bundle_compile.gypi
new file mode 100644
index 000000000..ecefe416e
--- /dev/null
+++ b/media/webrtc/trunk/build/json_schema_bundle_compile.gypi
@@ -0,0 +1,62 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ # When including this gypi, the following variables must be set:
+ # idl_schema_files: an array of idl files that comprise the api model.
+ # cc_dir: path to generated files
+ # root_namespace: the C++ namespace that all generated files go under
+ # Functions and namespaces can be excluded by setting "nocompile" to true.
+ 'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+ 'api_gen': '<(api_gen_dir)/compiler.py',
+ },
+ 'actions': [
+ {
+ 'action_name': 'genapi_bundle',
+ 'inputs': [
+ '<(api_gen_dir)/cc_generator.py',
+ '<(api_gen_dir)/code.py',
+ '<(api_gen_dir)/compiler.py',
+ '<(api_gen_dir)/cpp_type_generator.py',
+ '<(api_gen_dir)/cpp_util.py',
+ '<(api_gen_dir)/h_generator.py',
+ '<(api_gen_dir)/idl_schema.py',
+ '<(api_gen_dir)/json_schema.py',
+ '<(api_gen_dir)/model.py',
+ '<(api_gen_dir)/schema_bundle_generator.py',
+ '<(api_gen_dir)/util_cc_helper.py',
+ '<@(idl_schema_files)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_api.h',
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.h',
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.cc',
+ ],
+ 'action': [
+ 'python',
+ '<(api_gen)',
+ '--root=<(DEPTH)',
+ '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+ '--namespace=<(root_namespace)',
+ '--bundle',
+ '<@(idl_schema_files)',
+ ],
+ 'message': 'Generating C++ API bundle code',
+ 'process_outputs_as_sources': 1,
+ }
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ '<(DEPTH)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ]
+ },
+ # This target exports a hard dependency because it generates header
+ # files.
+ 'hard_dependency': 1,
+}
diff --git a/media/webrtc/trunk/build/json_schema_compile.gypi b/media/webrtc/trunk/build/json_schema_compile.gypi
new file mode 100644
index 000000000..6c8f69c5d
--- /dev/null
+++ b/media/webrtc/trunk/build/json_schema_compile.gypi
@@ -0,0 +1,110 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ # When including this gypi, the following variables must be set:
+ # json_schema_files: a list of json files that comprise the api model.
+ # idl_schema_files: a list of IDL files that comprise the api model.
+ # cc_dir: path to generated files
+ # root_namespace: the C++ namespace that all generated files go under
+ # Functions and namespaces can be excluded by setting "nocompile" to true.
+ 'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+ 'api_gen': '<(api_gen_dir)/compiler.py',
+ },
+ 'rules': [
+ {
+ 'rule_name': 'genapi',
+ 'extension': 'json',
+ 'inputs': [
+ '<(api_gen_dir)/any.cc',
+ '<(api_gen_dir)/any.h',
+ '<(api_gen_dir)/any_helper.py',
+ '<(api_gen_dir)/cc_generator.py',
+ '<(api_gen_dir)/code.py',
+ '<(api_gen_dir)/compiler.py',
+ '<(api_gen_dir)/cpp_type_generator.py',
+ '<(api_gen_dir)/cpp_util.py',
+ '<(api_gen_dir)/h_generator.py',
+ '<(api_gen_dir)/json_schema.py',
+ '<(api_gen_dir)/model.py',
+ '<(api_gen_dir)/util.cc',
+ '<(api_gen_dir)/util.h',
+ '<(api_gen_dir)/util_cc_helper.py',
+ # TODO(calamity): uncomment this when gyp on windows behaves like other
+ # platforms. List expansions of filepaths in inputs expand to different
+ # things.
+ # '<@(json_schema_files)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).cc',
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).h',
+ ],
+ 'action': [
+ 'python',
+ '<(api_gen)',
+ '<(RULE_INPUT_PATH)',
+ '--root=<(DEPTH)',
+ '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+ '--namespace=<(root_namespace)',
+ ],
+ 'message': 'Generating C++ code from <(RULE_INPUT_PATH) json files',
+ 'process_outputs_as_sources': 1,
+ },
+ {
+ 'rule_name': 'genapi_idl',
+ 'msvs_external_rule': 1,
+ 'extension': 'idl',
+ 'inputs': [
+ '<(api_gen_dir)/any.cc',
+ '<(api_gen_dir)/any.h',
+ '<(api_gen_dir)/any_helper.py',
+ '<(api_gen_dir)/cc_generator.py',
+ '<(api_gen_dir)/code.py',
+ '<(api_gen_dir)/compiler.py',
+ '<(api_gen_dir)/cpp_type_generator.py',
+ '<(api_gen_dir)/cpp_util.py',
+ '<(api_gen_dir)/h_generator.py',
+ '<(api_gen_dir)/idl_schema.py',
+ '<(api_gen_dir)/model.py',
+ '<(api_gen_dir)/util.cc',
+ '<(api_gen_dir)/util.h',
+ '<(api_gen_dir)/util_cc_helper.py',
+ # TODO(calamity): uncomment this when gyp on windows behaves like other
+ # platforms. List expansions of filepaths in inputs expand to different
+ # things.
+ # '<@(idl_schema_files)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).cc',
+ '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).h',
+ ],
+ 'action': [
+ 'python',
+ '<(api_gen)',
+ '<(RULE_INPUT_PATH)',
+ '--root=<(DEPTH)',
+ '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+ '--namespace=<(root_namespace)',
+ ],
+ 'message': 'Generating C++ code from <(RULE_INPUT_PATH) IDL files',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ '<(DEPTH)',
+ ],
+ 'dependencies':[
+ '<(DEPTH)/tools/json_schema_compiler/api_gen_util.gyp:api_gen_util',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ]
+ },
+ # This target exports a hard dependency because it generates header
+ # files.
+ 'hard_dependency': 1,
+}
diff --git a/media/webrtc/trunk/build/linux/chrome_linux.croc b/media/webrtc/trunk/build/linux/chrome_linux.croc
new file mode 100644
index 000000000..f4003060f
--- /dev/null
+++ b/media/webrtc/trunk/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+ # List of rules, applied in order
+ 'rules' : [
+ # Specify inclusions before exclusions, since rules are in order.
+
+ # Don't include non-Linux platform dirs
+ {
+ 'regexp' : '.*/(chromeos|views)/',
+ 'include' : 0,
+ },
+ # Don't include chromeos, windows, or mac specific files
+ {
+ 'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+ 'include' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '.*_test_linux\\.',
+ 'group' : 'test',
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/linux/dump_app_syms b/media/webrtc/trunk/build/linux/dump_app_syms
new file mode 100755
index 000000000..632bcc76a
--- /dev/null
+++ b/media/webrtc/trunk/build/linux/dump_app_syms
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
+#
+# Usage: dump_app_syms <dump_syms_exe> <strip_binary> \
+#            <binary_with_symbols> <symbols_output>
+
+set -e
+
+# Print the expected invocation to stderr.
+usage() {
+  echo -n "$0 <dump_syms_exe> <strip_binary> " >&2
+  echo "<binary_with_symbols> <symbols_output>" >&2
+}
+
+
+if [ $# -ne 4 ]; then
+  usage
+  exit 1
+fi
+
+# NOTE(review): SCRIPTDIR is computed but never used below — confirm whether
+# it can be removed.
+SCRIPTDIR="$(readlink -f "$(dirname "$0")")"
+DUMPSYMS="$1"
+STRIP_BINARY="$2"
+INFILE="$3"
+OUTFILE="$4"
+
+# Dump the symbols from the given binary, but only when the output file is
+# missing or older than the input (make-style freshness check).
+if [ ! -e "$OUTFILE" -o "$INFILE" -nt "$OUTFILE" ]; then
+  "$DUMPSYMS" "$INFILE" > "$OUTFILE"
+fi
+
+# Strip the binary in place unless stripping was disabled by passing "0".
+if [ "$STRIP_BINARY" != "0" ]; then
+  strip "$INFILE"
+fi
diff --git a/media/webrtc/trunk/build/linux/pkg-config-wrapper b/media/webrtc/trunk/build/linux/pkg-config-wrapper
new file mode 100755
index 000000000..c39e5cd3f
--- /dev/null
+++ b/media/webrtc/trunk/build/linux/pkg-config-wrapper
@@ -0,0 +1,47 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: a
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+
+# First two positional arguments: sysroot path and target architecture.
+root="$1"
+shift
+target_arch="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+  echo "usage: $0 /path/to/sysroot target_arch [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+# 64-bit sysroots keep their .pc files under lib64 instead of lib.
+if [ "$target_arch" = "x64" ]
+then
+  libpath="lib64"
+else
+  libpath="lib"
+fi
+
+# rewrite_dirs.py (next to this script) remaps the paths pkg-config emits so
+# they point inside the sysroot.
+rewrite=`dirname $0`/rewrite_dirs.py
+# The package name is the last argument on the command line.
+package=${!#}
+
+config_path=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/media/webrtc/trunk/build/linux/python_arch.sh b/media/webrtc/trunk/build/linux/python_arch.sh
new file mode 100755
index 000000000..01e41d066
--- /dev/null
+++ b/media/webrtc/trunk/build/linux/python_arch.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This figures out the architecture of the version of Python we are building
+# pyautolib against.
+#
+# python_arch.sh /usr/lib/libpython2.5.so.1.0
+# python_arch.sh /path/to/sysroot/usr/lib/libpython2.4.so.1.0
+#
+# Prints one of: arm, x64, ia32, or "unknown" — and exits 0 in all of those
+# cases; exits 1 only when the architecture could not be classified at all.
+
+# Resolve symlinks so "file" inspects the real shared object.
+python=$(readlink -f "$1")
+if [ ! -r "$python" ]; then
+  echo unknown
+  exit 0
+fi
+file_out=$(file "$python")
+if [ $? -ne 0 ]; then
+  echo unknown
+  exit 0
+fi
+
+# Classify based on the architecture string "file" reports.
+echo $file_out | grep -qs "ARM"
+if [ $? -eq 0 ]; then
+  echo arm
+  exit 0
+fi
+
+echo $file_out | grep -qs "x86-64"
+if [ $? -eq 0 ]; then
+  echo x64
+  exit 0
+fi
+
+echo $file_out | grep -qs "Intel 80386"
+if [ $? -eq 0 ]; then
+  echo ia32
+  exit 0
+fi
+
+# No recognized architecture substring matched.
+exit 1
diff --git a/media/webrtc/trunk/build/linux/rewrite_dirs.py b/media/webrtc/trunk/build/linux/rewrite_dirs.py
new file mode 100755
index 000000000..30f22f0cd
--- /dev/null
+++ b/media/webrtc/trunk/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+ '-idirafter',
+ '-imacros',
+ '-imultilib',
+ '-include',
+ '-iprefix',
+ '-iquote',
+ '-isystem',
+ '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  # Only absolute paths not already under the sysroot are rewritten; relative
+  # paths (and sysroot-internal ones) pass through unchanged.
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    # Drop the leading '/' so os.path.join does not discard the sysroot.
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          # The option was the last token on the line; warn and stop checking
+          # further prefixes for it.
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  # Filter each line of stdin through RewriteLine and echo the result.
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    # NOTE: Python 2 print statement — this script predates Python 3 support.
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/linux/system.gyp b/media/webrtc/trunk/build/linux/system.gyp
new file mode 100644
index 000000000..e36e558a8
--- /dev/null
+++ b/media/webrtc/trunk/build/linux/system.gyp
@@ -0,0 +1,637 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'conditions': [
+ ['sysroot!=""', {
+ 'pkg-config': './pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
+ }, {
+ 'pkg-config': 'pkg-config'
+ }]
+ ],
+ },
+ 'conditions': [
+ [ 'os_posix==1 and OS!="mac"', {
+ 'variables': {
+ # We use our own copy of libssl3, although we still need to link against
+ # the rest of NSS.
+ 'use_system_ssl%': 0,
+ },
+ }, {
+ 'variables': {
+ 'use_system_ssl%': 1,
+ },
+ }],
+ [ 'chromeos==0', {
+ # Hide GTK and related dependencies for Chrome OS, so they won't get
+ # added back to Chrome OS. Don't try to use GTK on Chrome OS.
+ 'targets': [
+ {
+ 'target_name': 'gtk',
+ 'type': 'none',
+ 'toolsets': ['host', 'target'],
+ 'variables': {
+ # gtk requires gmodule, but it does not list it as a dependency
+ # in some misconfigured systems.
+ 'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0',
+ },
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags <(gtk_packages))',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l <(gtk_packages))',
+ ],
+ },
+ }, {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(pkg-config --cflags <(gtk_packages))',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(pkg-config --libs-only-L --libs-only-other <(gtk_packages))',
+ ],
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l <(gtk_packages))',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'gtkprint',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'gdk',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags gdk-2.0)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
+ ],
+ },
+ }],
+ ],
+ },
+ ], # targets
+ }, { # chromeos==1
+ 'targets': [
+ {
+ # TODO(satorux): Remove this once dbus-glib clients are gone.
+ 'target_name': 'dbus-glib',
+ 'type': 'none',
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags dbus-glib-1)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-glib-1)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l dbus-glib-1)',
+ ],
+ },
+ },
+ ],
+ }]
+ ], # conditions
+ 'targets': [
+ {
+ 'target_name': 'ssl',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'conditions': [
+ ['use_openssl==1', {
+ 'dependencies': [
+ '../../third_party/openssl/openssl.gyp:openssl',
+ ],
+ }],
+ ['use_openssl==0 and use_system_ssl==0', {
+ 'dependencies': [
+ '../../net/third_party/nss/ssl.gyp:libssl',
+ '../../third_party/zlib/zlib.gyp:zlib',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs+': [
+ # We need for our local copies of the libssl3 headers to come
+ # before other includes, as we are shadowing system headers.
+ '<(DEPTH)/net/third_party/nss/ssl',
+ ],
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags nss)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+ ],
+ },
+ }],
+ ['use_openssl==0 and use_system_ssl==1', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags nss)',
+ ],
+ 'defines': [
+ 'USE_SYSTEM_SSL',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l nss)',
+ ],
+ },
+ }],
+ ]
+ }],
+ ],
+ },
+ {
+ 'target_name': 'freetype2',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags freetype2)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l freetype2)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'fontconfig',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags fontconfig)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l fontconfig)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'gconf',
+ 'type': 'none',
+ 'conditions': [
+ ['use_gconf==1 and _toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags gconf-2.0)',
+ ],
+ 'defines': [
+ 'USE_GCONF',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'gio',
+ 'type': 'none',
+ 'conditions': [
+ ['use_gio==1 and _toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags gio-2.0)',
+ ],
+ 'defines': [
+ 'USE_GIO',
+ ],
+ 'conditions': [
+ ['linux_link_gsettings==0', {
+ 'defines': ['DLOPEN_GSETTINGS'],
+ }],
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l gio-2.0)',
+ ],
+ 'conditions': [
+ ['linux_link_gsettings==0 and OS=="linux"', {
+ 'libraries': [
+ '-ldl',
+ ],
+ }],
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'x11',
+ 'type': 'none',
+ 'toolsets': ['host', 'target'],
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags x11)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l x11 xi)',
+ ],
+ },
+ }, {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(pkg-config --cflags x11)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
+ ],
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l x11 xi)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'xext',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags xext)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l xext)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'xfixes',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags xfixes)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l xfixes)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'libgcrypt',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target" and use_cups==1', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(libgcrypt-config --cflags)',
+ ],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '<!@(libgcrypt-config --libs)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'selinux',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lselinux',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'gnome_keyring',
+ 'type': 'none',
+ 'conditions': [
+ ['use_gnome_keyring==1', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+ ],
+ 'defines': [
+ 'USE_GNOME_KEYRING',
+ ],
+ 'conditions': [
+ ['linux_link_gnome_keyring==0', {
+ 'defines': ['DLOPEN_GNOME_KEYRING'],
+ }],
+ ],
+ },
+ 'conditions': [
+ ['linux_link_gnome_keyring!=0', {
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+ ],
+ },
+ }, {
+ 'conditions': [
+ ['OS=="linux"', {
+ 'link_settings': {
+ 'libraries': [
+ '-ldl',
+ ],
+ },
+ }],
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ # The unit tests use a few convenience functions from the GNOME
+ # Keyring library directly. We ignore linux_link_gnome_keyring and
+ # link directly in this version of the target to allow this.
+ # *** Do not use this target in the main binary! ***
+ 'target_name': 'gnome_keyring_direct',
+ 'type': 'none',
+ 'conditions': [
+ ['use_gnome_keyring==1', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+ ],
+ 'defines': [
+ 'USE_GNOME_KEYRING',
+ ],
+ 'conditions': [
+ ['linux_link_gnome_keyring==0', {
+ 'defines': ['DLOPEN_GNOME_KEYRING'],
+ }],
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'dbus',
+ 'type': 'none',
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags dbus-1)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l dbus-1)',
+ ],
+ },
+ },
+ {
+ 'target_name': 'glib',
+ 'type': 'none',
+ 'toolsets': ['host', 'target'],
+ 'variables': {
+ 'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0',
+ },
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags <(glib_packages))',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other <(glib_packages))',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l <(glib_packages))',
+ ],
+ },
+ }, {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(pkg-config --cflags <(glib_packages))',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(pkg-config --libs-only-L --libs-only-other <(glib_packages))',
+ ],
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l <(glib_packages))',
+ ],
+ },
+ }],
+ ['chromeos==1', {
+ 'link_settings': {
+ 'libraries': [ '-lXtst' ]
+ }
+ }],
+ ],
+ },
+ {
+ 'target_name': 'pangocairo',
+ 'type': 'none',
+ 'toolsets': ['host', 'target'],
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags pangocairo)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l pangocairo)',
+ ],
+ },
+ }, {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(pkg-config --cflags pangocairo)',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(pkg-config --libs-only-L --libs-only-other pangocairo)',
+ ],
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l pangocairo)',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'libresolv',
+ 'type': 'none',
+ 'link_settings': {
+ 'libraries': [
+ '-lresolv',
+ ],
+ },
+ },
+ {
+ 'target_name': 'ibus',
+ 'type': 'none',
+ 'conditions': [
+ ['use_ibus==1', {
+ 'variables': {
+ 'ibus_min_version': '1.3.99.20110425',
+ },
+ 'direct_dependent_settings': {
+ 'defines': ['HAVE_IBUS=1'],
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags "ibus-1.0 >= <(ibus_min_version)")',
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other "ibus-1.0 >= <(ibus_min_version)")',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l "ibus-1.0 >= <(ibus_min_version)")',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'udev',
+ 'type': 'none',
+ 'conditions': [
+ # libudev is not available on *BSD
+ ['_toolset=="target" and os_bsd!=1', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags libudev)'
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other libudev)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l libudev)',
+ ],
+ },
+ }],
+ ],
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/mac/OWNERS b/media/webrtc/trunk/build/mac/OWNERS
new file mode 100644
index 000000000..c56e89dd1
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/OWNERS
@@ -0,0 +1,2 @@
+mark@chromium.org
+thomasvl@chromium.org
diff --git a/media/webrtc/trunk/build/mac/change_mach_o_flags.py b/media/webrtc/trunk/build/mac/change_mach_o_flags.py
new file mode 100755
index 000000000..c2aeaec9b
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/change_mach_o_flags.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: change_mach_o_flags.py [--executable-heap] [--no-pie] <executablepath>
+
+Arranges for the executable at |executable_path| to have its data (heap)
+pages protected to prevent execution on Mac OS X 10.7 ("Lion"), and to have
+the PIE (position independent executable) bit set to enable ASLR (address
+space layout randomization). With --executable-heap or --no-pie, the
+respective bits are cleared instead of set, making the heap executable or
+disabling PIE/ASLR.
+
+This script is able to operate on thin (single-architecture) Mach-O files
+and fat (universal, multi-architecture) files. When operating on fat files,
+it will set or clear the bits for each architecture contained therein.
+
+NON-EXECUTABLE HEAP
+
+Traditionally in Mac OS X, 32-bit processes did not have data pages set to
+prohibit execution. Although user programs could call mprotect and
+mach_vm_protect to deny execution of code in data pages, the kernel would
+silently ignore such requests without updating the page tables, and the
+hardware would happily execute code on such pages. 64-bit processes were
+always given proper hardware protection of data pages. This behavior was
+controllable on a system-wide level via the vm.allow_data_exec sysctl, which
+is set by default to 1. The bit with value 1 (set by default) allows code
+execution on data pages for 32-bit processes, and the bit with value 2
+(clear by default) does the same for 64-bit processes.
+
+In Mac OS X 10.7, executables can "opt in" to having hardware protection
+against code execution on data pages applied. This is done by setting a new
+bit in the |flags| field of an executable's |mach_header|. When
+MH_NO_HEAP_EXECUTION is set, proper protections will be applied, regardless
+of the setting of vm.allow_data_exec. See xnu-1699.22.73/osfmk/vm/vm_map.c
+override_nx and xnu-1699.22.73/bsd/kern/mach_loader.c load_machfile.
+
+The Apple toolchain has been revised to set the MH_NO_HEAP_EXECUTION when
+producing executables, provided that -allow_heap_execute is not specified
+at link time. Only linkers shipping with Xcode 4.0 and later (ld64-123.2 and
+later) have this ability. See ld64-123.2.1/src/ld/Options.cpp
+Options::reconfigureDefaults() and
+ld64-123.2.1/src/ld/HeaderAndLoadCommands.hpp
+HeaderAndLoadCommandsAtom<A>::flags().
+
+This script sets the MH_NO_HEAP_EXECUTION bit on Mach-O executables. It is
+intended for use with executables produced by a linker that predates Apple's
+modifications to set this bit itself. It is also useful for setting this bit
+for non-i386 executables, including x86_64 executables. Apple's linker only
+sets it for 32-bit i386 executables, presumably under the assumption that
+the value of vm.allow_data_exec is set in stone. However, if someone were to
+change vm.allow_data_exec to 2 or 3, 64-bit x86_64 executables would run
+without hardware protection against code execution on data pages. This
+script can set the bit for x86_64 executables, guaranteeing that they run
+with appropriate protection even when vm.allow_data_exec has been tampered
+with.
+
+POSITION-INDEPENDENT EXECUTABLES/ADDRESS SPACE LAYOUT RANDOMIZATION
+
+This script sets or clears the MH_PIE bit in an executable's Mach-O header,
+enabling or disabling position independence on Mac OS X 10.5 and later.
+Processes running position-independent executables have varying levels of
+ASLR protection depending on the OS release. The main executable's load
+address, shared library load addresses, and the heap and stack base
+addresses may be randomized. Position-independent executables are produced
+by supplying the -pie flag to the linker (or defeated by supplying -no_pie).
+Executables linked with a deployment target of 10.7 or higher have PIE on
+by default.
+
+This script is never strictly needed during the build to enable PIE, as all
+linkers used are recent enough to support -pie. However, it's used to
+disable the PIE bit as needed on already-linked executables.
+"""
+
+import optparse
+import os
+import struct
+import sys
+
+
+# <mach-o/fat.h>
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+
+# <mach-o/loader.h>
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+MH_EXECUTE = 0x2
+MH_PIE = 0x00200000
+MH_NO_HEAP_EXECUTION = 0x01000000
+
+
class MachOError(Exception):
  """Raised by this module for malformed Mach-O/fat files and I/O surprises."""
+
+
def CheckedSeek(file, offset):
  """Seeks the file-like object at |file| to absolute position |offset|.

  Raises a MachOError if the position reported by the file afterwards is not
  exactly |offset|.
  """
  file.seek(offset, os.SEEK_SET)
  new_offset = file.tell()
  if new_offset != offset:
    # The original used the Python 2-only "raise Class, message" statement
    # form; calling the exception class works under both Python 2 and 3.
    raise MachOError(
        'seek: expected offset %d, observed %d' % (offset, new_offset))
+
+
def CheckedRead(file, count):
  """Reads exactly |count| bytes from the file-like |file| object.

  Raises a MachOError if any other number of bytes is read (for example, on
  a short read at end-of-file). Returns the bytes read.
  """
  # Renamed from "bytes" to avoid shadowing the builtin; "raise Class, msg"
  # replaced with the Python 2/3-compatible call form.
  data = file.read(count)
  if len(data) != count:
    raise MachOError(
        'read: expected length %d, observed %d' % (count, len(data)))

  return data
+
+
def ReadUInt32(file, endian):
  """Reads an unsigned 32-bit integer from the file-like |file| object.

  |endian| is a byte-order character for the |struct| module ('<' or '>').
  Raises a MachOError (via CheckedRead) if four bytes cannot be read.
  """
  raw = CheckedRead(file, 4)
  return struct.unpack(endian + 'I', raw)[0]
+
+
def ReadMachHeader(file, endian):
  """Reads an entire |mach_header| structure (<mach-o/loader.h>) from |file|.

  |endian| is a byte-order character for the |struct| module. Returns the
  seven header members (magic, cputype, cpusubtype, filetype, ncmds,
  sizeofcmds, flags) as a tuple of numbers. Raises a MachOError (via
  CheckedRead) if 28 bytes cannot be read.
  """
  raw = CheckedRead(file, 28)
  # struct.unpack already yields the 7-tuple in declaration order.
  return struct.unpack(endian + '7I', raw)
+
+
def ReadFatArch(file):
  """Reads an entire |fat_arch| structure (<mach-o/fat.h>) from |file|.

  |fat_arch| structures are always stored big-endian, so no endianness
  argument is needed. Returns the five members (cputype, cpusubtype, offset,
  size, align) as a tuple of numbers. Raises a MachOError (via CheckedRead)
  if 20 bytes cannot be read.
  """
  raw = CheckedRead(file, 20)
  return struct.unpack('>5I', raw)
+
+
def WriteUInt32(file, uint32, endian):
  """Writes |uint32| as an unsigned 32-bit integer to the file-like |file|
  object, using the byte order given by |endian| (per the |struct| module).
  """
  packed = struct.pack(endian + 'I', uint32)
  assert len(packed) == 4
  file.write(packed)
+
+
def HandleMachOFile(file, options, offset=0):
  """Sets or clears MH_PIE and MH_NO_HEAP_EXECUTION in a Mach-O header.

  Seeks the file-like |file| object to |offset|, reads its |mach_header|,
  and rewrites the header's |flags| field if appropriate. The header's
  endianness is detected. Both 32-bit and 64-bit Mach-O headers are
  supported (mach_header and mach_header_64). Raises MachOError if used on
  a header that does not have a known magic number or is not of type
  MH_EXECUTE. The MH_PIE and MH_NO_HEAP_EXECUTION bits are set or cleared
  in the |flags| field according to |options| and written to |file| only if
  any changes need to be made. If already set or clear as specified by
  |options|, nothing is written.
  """
  # Read the magic little-endian just to classify it; the *_CIGAM values
  # mean the header is byte-swapped relative to this first read.
  CheckedSeek(file, offset)
  magic = ReadUInt32(file, '<')
  if magic == MH_MAGIC or magic == MH_MAGIC_64:
    endian = '<'
  elif magic == MH_CIGAM or magic == MH_CIGAM_64:
    endian = '>'
  else:
    # Python 2-only "raise Class, message" syntax replaced with the
    # 2/3-compatible call form (original error text preserved).
    raise MachOError(
        'Mach-O file at offset %d has illusion of magic' % offset)

  CheckedSeek(file, offset)
  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
      ReadMachHeader(file, endian)
  assert magic == MH_MAGIC or magic == MH_MAGIC_64
  if filetype != MH_EXECUTE:
    raise MachOError(
        'Mach-O file at offset %d is type 0x%x, expected MH_EXECUTE' %
        (offset, filetype))

  original_flags = flags

  if options.no_heap_execution:
    flags |= MH_NO_HEAP_EXECUTION
  else:
    flags &= ~MH_NO_HEAP_EXECUTION

  if options.pie:
    flags |= MH_PIE
  else:
    flags &= ~MH_PIE

  # |flags| sits 24 bytes into both mach_header and mach_header_64; only
  # rewrite when something actually changed.
  if flags != original_flags:
    CheckedSeek(file, offset + 24)
    WriteUInt32(file, flags, endian)
+
+
def HandleFatFile(file, options, fat_offset=0):
  """Applies HandleMachOFile to each architecture in a fat file.

  Seeks the file-like |file| object to |fat_offset| and loops over its
  |fat_header| entries, calling HandleMachOFile for each contained
  architecture.
  """
  CheckedSeek(file, fat_offset)
  magic = ReadUInt32(file, '>')
  assert magic == FAT_MAGIC

  nfat_arch = ReadUInt32(file, '>')

  # xrange is Python 2-only; range is equivalent here (the architecture
  # count is tiny) and keeps the script runnable under Python 3.
  for index in range(0, nfat_arch):
    cputype, cpusubtype, offset, size, align = ReadFatArch(file)
    assert size >= 28  # must be at least large enough to hold a mach_header

    # HandleMachOFile will seek around. Come back here after calling it, in
    # case it sought.
    fat_arch_offset = file.tell()
    HandleMachOFile(file, options, offset)
    CheckedSeek(file, fat_arch_offset)
+
+
def main(me, args):
  """Command-line entry point.

  |me| is the program name (kept for call-site compatibility); |args| is the
  argument list, expected to hold the option flags and exactly one executable
  path. Returns 0 on success, 1 on a usage error; raises MachOError for
  files that are not Mach-O or fat executables.
  """
  parser = optparse.OptionParser('%prog [options] <executable_path>')
  parser.add_option('--executable-heap', action='store_false',
                    dest='no_heap_execution', default=True,
                    help='Clear the MH_NO_HEAP_EXECUTION bit')
  parser.add_option('--no-pie', action='store_false',
                    dest='pie', default=True,
                    help='Clear the MH_PIE bit')
  (options, loose_args) = parser.parse_args(args)
  if len(loose_args) != 1:
    parser.print_usage()
    return 1

  executable_path = loose_args[0]
  executable_file = open(executable_path, 'rb+')
  try:
    magic = ReadUInt32(executable_file, '<')
    if magic == FAT_CIGAM:
      # Check FAT_CIGAM and not FAT_MAGIC because the read was little-endian.
      HandleFatFile(executable_file, options)
    elif magic in (MH_MAGIC, MH_CIGAM, MH_MAGIC_64, MH_CIGAM_64):
      HandleMachOFile(executable_file, options)
    else:
      # Report the path, not the repr of the open file object (the original
      # interpolated |executable_file| here).
      raise MachOError('%s is not a Mach-O or fat file' % executable_path)
  finally:
    # Close even when an exception propagates, so the descriptor (opened for
    # read-write) is not leaked.
    executable_file.close()

  return 0
+
+
# Script entry: the program name and its arguments are passed separately, and
# main()'s return value becomes the process exit status.
if __name__ == '__main__':
  sys.exit(main(sys.argv[0], sys.argv[1:]))
diff --git a/media/webrtc/trunk/build/mac/change_mach_o_flags_from_xcode.sh b/media/webrtc/trunk/build/mac/change_mach_o_flags_from_xcode.sh
new file mode 100755
index 000000000..1824f8db5
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/change_mach_o_flags_from_xcode.sh
@@ -0,0 +1,15 @@
#!/bin/sh

# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Thin Xcode adapter for change_mach_o_flags.py. Xcode communicates its
# parameters through the environment, while the Python script expects them
# on the command line; any arguments given to this wrapper are passed
# through, followed by the built executable's path.

set -e

script_dir="$(dirname "${0}")"
exec "${script_dir}/change_mach_o_flags.py" "${@}" \
     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/media/webrtc/trunk/build/mac/chrome_mac.croc b/media/webrtc/trunk/build/mac/chrome_mac.croc
new file mode 100644
index 000000000..8cde00ce2
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/chrome_mac.croc
@@ -0,0 +1,36 @@
+# -*- python -*-
+# Crocodile config file for Chromium mac
+
+{
+ # List of rules, applied in order
+ 'rules' : [
+ # Specify inclusions before exclusions, since rules are in order.
+
+ # Don't include chromeos, linux, or windows specific files
+ {
+ 'regexp' : '.*(_|/)(chromeos|linux|win|views)(\\.|_)',
+ 'include' : 0,
+ },
+ # Don't include ChromeOS dirs
+ {
+ 'regexp' : '.*/chromeos/',
+ 'include' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '.*_test_mac\\.',
+ 'group' : 'test',
+ },
+
+ # Languages
+ {
+ 'regexp' : '.*\\.m$',
+ 'language' : 'ObjC',
+ },
+ {
+ 'regexp' : '.*\\.mm$',
+ 'language' : 'ObjC++',
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/mac/copy_framework_unversioned.sh b/media/webrtc/trunk/build/mac/copy_framework_unversioned.sh
new file mode 100755
index 000000000..380cc9084
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/copy_framework_unversioned.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Copies a framework to its new home, "unversioning" it.
+#
+# Normally, frameworks are versioned bundles. The contents of a framework are
+# stored in a versioned directory within the bundle, and symbolic links
+# provide access to the actual code and resources. See
+# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html
+#
+# The symbolic links usually found in frameworks create problems. Symbolic
+# links are excluded from code signatures. That means that it's possible to
+# remove or retarget a symbolic link within a framework without affecting the
+# seal. In Chrome's case, the outer .app bundle contains a framework where
+# all application code and resources live. In order for the signature on the
+# .app to be meaningful, it encompasses the framework. Because framework
+# resources are accessed through the framework's symbolic links, this
+# arrangement results in a case where the resources can be altered without
+# affecting the .app signature's validity.
+#
+# Indirection through symbolic links also carries a runtime performance
+# penalty on open() operations, although open() typically completes so quickly
+# that this is not considered a major performance problem.
+#
+# To resolve these problems, the frameworks that ship within Chrome's .app
+# bundle are unversioned. Unversioning is simple: instead of using the
+# original outer .framework directory as the framework that ships within the
+# .app, the inner versioned directory is used. Instead of accessing bundled
+# resources through symbolic links, they are accessed directly. In normal
+# situations, the only hard-coded use of the versioned directory is by dyld,
+# when loading the framework's code, but this is handled through a normal
+# Mach-O load command, and it is easy to adjust the load command to point to
+# the unversioned framework code rather than the versioned counterpart.
+#
+# The resulting framework bundles aren't strictly conforming, but they work
+# as well as normal versioned framework bundles.
+#
+# An option to skip running install_name_tool is available. By passing -I as
+# the first argument to this script, install_name_tool will be skipped. This
+# is only suitable for copied frameworks that will not be linked against, or
+# when install_name_tool will be run on any linker output when something is
+# linked against the copied framework. This option exists to allow signed
+# frameworks to pass through without subjecting them to any modifications that
+# would break their signatures.
+
set -e

# -I (as the sole extra leading argument) suppresses install_name_tool; see
# the header comment above for when that is appropriate.
RUN_INSTALL_NAME_TOOL=1
if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then
  shift
  RUN_INSTALL_NAME_TOOL=
fi

if [ $# -ne 2 ] ; then
  echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2
  exit 1
fi

# FRAMEWORK should be a path to a versioned framework bundle, ending in
# .framework. DESTINATION_DIR is the directory that the unversioned framework
# bundle will be copied to.

FRAMEWORK="${1}"
DESTINATION_DIR="${2}"

FRAMEWORK_NAME="$(basename "${FRAMEWORK}")"
if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then
  echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2
  exit 1
fi
# Strip the trailing ".framework" (10 characters) to get the bare name.
FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}"

# Find the current version.
VERSIONS="${FRAMEWORK}/Versions"
CURRENT_VERSION_LINK="${VERSIONS}/Current"
CURRENT_VERSION_ID="$(readlink "${VERSIONS}/Current")"
CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}"

# Make sure that the framework's structure makes sense as a versioned bundle.
if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then
  echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2
  exit 1
fi

DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}"

# Copy the versioned directory within the versioned framework to its
# destination location. Headers are excluded: they are not needed at
# runtime, only when building against the framework.
mkdir -p "${DESTINATION_DIR}"
rsync -acC --delete --exclude Headers --exclude PrivateHeaders \
    --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}"

if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then
  # Adjust the Mach-O LC_ID_DYLIB load command in the framework. This does not
  # change the LC_LOAD_DYLIB load commands in anything that may have already
  # linked against the framework. Not all frameworks will actually need this
  # to be changed. Some frameworks may already be built with the proper
  # LC_ID_DYLIB for use as an unversioned framework. Xcode users can do this
  # by setting LD_DYLIB_INSTALL_NAME to
  # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
  # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
  # at link time.
  FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
  # Pull the current install name out of otool's LC_ID_DYLIB listing.
  LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
                     grep -A10 "^ *cmd LC_ID_DYLIB$" |
                     grep -m1 "^ *name" |
                     sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
  VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
  # Drop the /Versions/<id>/ path component so the install name matches the
  # unversioned layout of the copy.
  LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
                     sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"

  if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
    install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
  fi
fi
diff --git a/media/webrtc/trunk/build/mac/find_sdk.py b/media/webrtc/trunk/build/mac/find_sdk.py
new file mode 100755
index 000000000..ca58284ea
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/find_sdk.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import subprocess
+import sys
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+Usage:
+ python find_sdk.py 10.6 # Ignores SDKs < 10.6
+"""
+
+from optparse import OptionParser
+
+
def parse_version(version_str):
  """'10.6' => [10, 6]

  Returns a list of ints so that results compare correctly with < and >=.
  (Under Python 3, map() returns an iterator, which is not orderable, so the
  original "map(int, ...)" form would break the version comparisons in
  main().)
  """
  return [int(part) for part in re.findall(r'(\d+)', version_str)]
+
+
def main():
  """Prints the lowest locally available SDK version >= the given minimum.

  Parses sys.argv for options and the minimum SDK version, queries
  xcode-select for the SDK directory (on darwin only), and returns the best
  matching version string — or the minimum version itself when --verify is
  set and no match was found. Raises on xcode-select failure or when no
  suitable SDK exists.
  """
  parser = OptionParser()
  parser.add_option("--verify",
                    action="store_true", dest="verify", default=False,
                    help="return the sdk argument and warn if it doesn't exist")
  parser.add_option("--sdk_path",
                    action="store", type="string", dest="sdk_path", default="",
                    help="user-specified SDK path; bypasses verification")
  (options, args) = parser.parse_args()
  min_sdk_version = args[0]

  if sys.platform == 'darwin':
    # universal_newlines makes |out| a str under both Python 2 and 3
    # (without it, Python 3 returns bytes and the os.path.join below fails).
    job = subprocess.Popen(['xcode-select', '-print-path'],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           universal_newlines=True)
    out, err = job.communicate()
    if job.returncode != 0:
      # "print >>sys.stderr" is Python 2-only syntax; write() is portable.
      sys.stderr.write('%s\n%s\n' % (out, err))
      raise Exception(('Error %d running xcode-select, you might have to run '
        '|sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer| '
        'if you are using Xcode 4.') % job.returncode)
    # The Developer folder moved in Xcode 4.3.
    xcode43_sdk_path = os.path.join(
        out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
    if os.path.isdir(xcode43_sdk_path):
      sdk_dir = xcode43_sdk_path
    else:
      sdk_dir = os.path.join(out.rstrip(), 'SDKs')
    # Raw string avoids the invalid-escape warning for "\." and "\d".
    sdks = [re.findall(r'^MacOSX(10\.\d+)\.sdk$', s)
            for s in os.listdir(sdk_dir)]
    sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
    sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
            if parse_version(s) >= parse_version(min_sdk_version)]
    if not sdks:
      raise Exception('No %s+ SDK found' % min_sdk_version)
    best_sdk = sorted(sdks, key=parse_version)[0]
  else:
    best_sdk = ""

  if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
    sys.stderr.write('\n')
    sys.stderr.write(' vvvvvvv\n')
    sys.stderr.write('\n')
    sys.stderr.write(
        'This build requires the %s SDK, but it was not found on your system.\n'
        % min_sdk_version)
    sys.stderr.write(
        'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.\n')
    sys.stderr.write('\n')
    sys.stderr.write(' ^^^^^^^\n')
    sys.stderr.write('\n')
    return min_sdk_version

  return best_sdk
+
+
if __name__ == '__main__':
  # print-as-function works under both Python 2 and 3; the original
  # "print main()" statement form is a syntax error under Python 3.
  print(main())
diff --git a/media/webrtc/trunk/build/mac/make_more_helpers.sh b/media/webrtc/trunk/build/mac/make_more_helpers.sh
new file mode 100755
index 000000000..6f5c4749e
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/make_more_helpers.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: make_more_helpers.sh <directory_within_contents> <app_name>
+#
+# This script creates additional helper .app bundles for Chromium, based on
+# the existing helper .app bundle, changing their Mach-O header's flags to
+# enable and disable various features. Based on Chromium Helper.app, it will
+# create Chromium Helper EH.app, which has the MH_NO_HEAP_EXECUTION bit
+# cleared to support Chromium child processes that require an executable heap,
+# and Chromium Helper NP.app, which has the MH_PIE bit cleared to support
+# Chromium child processes that cannot tolerate ASLR.
+#
+# This script expects to be called from the chrome_exe target as a postbuild,
+# and operates directly within the built-up browser app's versioned directory.
+#
+# Each helper is adjusted by giving it the proper bundle name, renaming the
+# executable, adjusting several Info.plist keys, and changing the executable's
+# Mach-O flags.
+
+set -eu
+
# Clone an existing "<app_name> Helper.app" into "<app_name> Helper
# <feature>.app", renaming the executable, flipping Mach-O header bits via
# change_mach_o_flags.py, and rewriting the bundle's Info.plist identity.
#   ${1}: directory containing the original helper .app
#   ${2}: application name, e.g. "Chromium"
#   ${3}: feature suffix, e.g. "EH" or "NP"
#   ${4}: flag string passed to change_mach_o_flags.py (word-split on purpose)
make_helper() {
  local containing_dir="${1}"
  local app_name="${2}"
  local feature="${3}"
  local flags="${4}"

  local helper_name="${app_name} Helper"
  local helper_stem="${containing_dir}/${helper_name}"
  local original_helper="${helper_stem}.app"
  if [[ ! -d "${original_helper}" ]]; then
    echo "${0}: error: ${original_helper} is a required directory" >& 2
    exit 1
  fi
  local original_helper_exe="${original_helper}/Contents/MacOS/${helper_name}"
  if [[ ! -f "${original_helper_exe}" ]]; then
    echo "${0}: error: ${original_helper_exe} is a required file" >& 2
    exit 1
  fi

  local feature_helper="${helper_stem} ${feature}.app"

  # Clone the whole bundle, then customize the copy in place.
  rsync -acC --delete --include '*.so' "${original_helper}/" "${feature_helper}"

  local helper_feature="${helper_name} ${feature}"
  local helper_feature_exe="${feature_helper}/Contents/MacOS/${helper_feature}"
  mv "${feature_helper}/Contents/MacOS/${helper_name}" "${helper_feature_exe}"

  # Flip the requested Mach-O header bits on the renamed executable.
  # ${flags} is intentionally unquoted so multiple flags word-split.
  local change_flags="$(dirname "${0}")/change_mach_o_flags.py"
  "${change_flags}" ${flags} "${helper_feature_exe}"

  local feature_info="${feature_helper}/Contents/Info"
  local feature_info_plist="${feature_info}.plist"

  # Rewrite the bundle metadata so the copy identifies as its own app.
  defaults write "${feature_info}" "CFBundleDisplayName" "${helper_feature}"
  defaults write "${feature_info}" "CFBundleExecutable" "${helper_feature}"

  cfbundleid="$(defaults read "${feature_info}" "CFBundleIdentifier")"
  feature_cfbundleid="${cfbundleid}.${feature}"
  defaults write "${feature_info}" "CFBundleIdentifier" "${feature_cfbundleid}"

  cfbundlename="$(defaults read "${feature_info}" "CFBundleName")"
  feature_cfbundlename="${cfbundlename} ${feature}"
  defaults write "${feature_info}" "CFBundleName" "${feature_cfbundlename}"

  # As usual, defaults might have put the plist into whatever format excites
  # it, but Info.plists get converted back to the expected XML format.
  plutil -convert xml1 "${feature_info_plist}"

  # `defaults` also changes the file permissions, so make the file
  # world-readable again.
  chmod a+r "${feature_info_plist}"
}
+
if [[ ${#} -ne 2 ]]; then
  echo "usage: ${0} <directory_within_contents> <app_name>" >& 2
  exit 1
fi

DIRECTORY_WITHIN_CONTENTS="${1}"
APP_NAME="${2}"

# BUILT_PRODUCTS_DIR and CONTENTS_FOLDER_PATH come from the environment —
# presumably set by Xcode when this runs as a postbuild; confirm at call site.
CONTENTS_DIR="${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}"
CONTAINING_DIR="${CONTENTS_DIR}/${DIRECTORY_WITHIN_CONTENTS}"

# EH: executable heap (clears MH_NO_HEAP_EXECUTION).
# NP: no PIE (clears MH_PIE).
make_helper "${CONTAINING_DIR}" "${APP_NAME}" "EH" "--executable-heap"
make_helper "${CONTAINING_DIR}" "${APP_NAME}" "NP" "--no-pie"
diff --git a/media/webrtc/trunk/build/mac/strip_from_xcode b/media/webrtc/trunk/build/mac/strip_from_xcode
new file mode 100755
index 000000000..c26b9fb49
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/strip_from_xcode
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a handy wrapper script that figures out how to call the strip
+# utility (strip_save_dsym in this case), if it even needs to be called at all,
+# and then does it. This script should be called by a post-link phase in
+# targets that might generate Mach-O executables, dynamic libraries, or
+# loadable bundles.
+#
+# An example "Strip If Needed" build phase placed after "Link Binary With
+# Libraries" would do:
+# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
+
+if [ "${CONFIGURATION}" != "Release" ] ; then
+ # Only strip in release mode.
+ exit 0
+fi
+
+declare -a FLAGS
+
+# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
+# Weird.
+if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
+ [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
+ # Strip everything (no special flags). No-op.
+ true
+elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
+ [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
+ # Strip debugging symbols and local symbols
+ FLAGS[${#FLAGS[@]}]=-S
+ FLAGS[${#FLAGS[@]}]=-x
+elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
+ # Don't strip static libraries.
+ exit 0
+else
+ # Warn, but don't treat this as an error.
+ echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}
+ exit 0
+fi
+
+if [ -n "${STRIPFLAGS}" ] ; then
+ # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
+ # Flags".
+ for stripflag in "${STRIPFLAGS}" ; do
+ FLAGS[${#FLAGS[@]}]="${stripflag}"
+ done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to be saved in this
+ # environment variable by setting it as a build setting. This isn't a
+ # standard Xcode setting. It's used in preference to STRIPFLAGS to
+ # eliminate quoting ambiguity concerns.
+ FLAGS[${#FLAGS[@]}]=-s
+ FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+ "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/media/webrtc/trunk/build/mac/strip_save_dsym b/media/webrtc/trunk/build/mac/strip_save_dsym
new file mode 100755
index 000000000..ef08d831f
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/strip_save_dsym
@@ -0,0 +1,341 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
+#
+# strip_save_dsym is a wrapper around the standard strip utility. Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing. That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow. On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast. Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
+# Returns a list of architectures contained in a Mach-O file. The file can be
+# a universal (fat) file, in which case there will be one list element for
+# each contained architecture, or it can be a thin single-architecture Mach-O
+# file, in which case the list will contain a single element identifying the
+# architecture. On error, returns an empty list. Determines the architecture
+# list by calling file.
+def macho_archs(macho):
+ macho_types = ["executable",
+ "dynamically linked shared library",
+ "bundle"]
+ macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
+
+ file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
+ stdout=subprocess.PIPE)
+
+ archs = []
+
+ type_line = file_cmd.stdout.readline()
+ type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
+ if type_match:
+ archs.append(type_match.group(1))
+ return [type_match.group(1)]
+ else:
+ type_match = re.match("^Mach-O universal binary with (.*) architectures$",
+ type_line)
+ if type_match:
+ for i in range(0, int(type_match.group(1))):
+ arch_line = file_cmd.stdout.readline()
+ arch_match = re.match(
+ "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
+ arch_line)
+ if arch_match:
+ archs.append(arch_match.group(1))
+
+ if file_cmd.wait() != 0:
+ archs = []
+
+ if len(archs) == 0:
+ print >> sys.stderr, "No architectures in %s" % macho
+
+ return archs
+
+# Returns a dictionary mapping architectures contained in the file as returned
+# by macho_archs to the LC_UUID load command for that architecture.
+# Architectures with no LC_UUID load command are omitted from the dictionary.
+# Determines the UUID value by calling otool.
+def macho_uuids(macho):
+ uuids = {}
+
+ archs = macho_archs(macho)
+ if len(archs) == 0:
+ return uuids
+
+ for arch in archs:
+ if arch == "":
+ continue
+
+ otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
+ macho],
+ stdout=subprocess.PIPE)
+ # state 0 is when nothing UUID-related has been seen yet. State 1 is
+ # entered after a load command begins, but it may not be an LC_UUID load
+ # command. States 2, 3, and 4 are intermediate states while reading an
+ # LC_UUID command. State 5 is the terminal state for a successful LC_UUID
+ # read. State 6 is the error state.
+ state = 0
+ uuid = ""
+ for otool_line in otool_cmd.stdout:
+ if state == 0:
+ if re.match("^Load command .*$", otool_line):
+ state = 1
+ elif state == 1:
+ if re.match("^ cmd LC_UUID$", otool_line):
+ state = 2
+ else:
+ state = 0
+ elif state == 2:
+ if re.match("^ cmdsize 24$", otool_line):
+ state = 3
+ else:
+ state = 6
+ elif state == 3:
+ # The UUID display format changed in the version of otool shipping
+ # with the Xcode 3.2.2 prerelease. The new format is traditional:
+ # uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+      # and with Xcode 3.2.6, the line is indented one more space:
+ # uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+ # The old format, from cctools-750 and older's otool, breaks the UUID
+ # up into a sequence of bytes:
+ # uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
+ # 0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
+ new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
+ otool_line)
+ if new_uuid_match:
+ uuid = new_uuid_match.group(1)
+
+ # Skip state 4, there is no second line to read.
+ state = 5
+ else:
+ old_uuid_match = re.match("^ uuid 0x(..) 0x(..) 0x(..) 0x(..) "
+ "0x(..) 0x(..) 0x(..) 0x(..)$",
+ otool_line)
+ if old_uuid_match:
+ state = 4
+ uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
+ old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
+ old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
+ old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
+ else:
+ state = 6
+ elif state == 4:
+ old_uuid_match = re.match("^ 0x(..) 0x(..) 0x(..) 0x(..) "
+ "0x(..) 0x(..) 0x(..) 0x(..)$",
+ otool_line)
+ if old_uuid_match:
+ state = 5
+ uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
+ old_uuid_match.group(3) + old_uuid_match.group(4) + \
+ old_uuid_match.group(5) + old_uuid_match.group(6) + \
+ old_uuid_match.group(7) + old_uuid_match.group(8)
+ else:
+ state = 6
+
+ if otool_cmd.wait() != 0:
+ state = 6
+
+ if state == 5:
+ uuids[arch] = uuid.upper()
+
+ if len(uuids) == 0:
+ print >> sys.stderr, "No UUIDs in %s" % macho
+
+ return uuids
+
+# Given a path to a Mach-O file and possible information from the environment,
+# determines the desired path to the .dSYM.
+def dsym_path(macho):
+ # If building a bundle, the .dSYM should be placed next to the bundle. Use
+ # WRAPPER_NAME to make this determination. If called from xcodebuild,
+ # WRAPPER_NAME will be set to the name of the bundle.
+ dsym = ""
+ if "WRAPPER_NAME" in os.environ:
+ if "BUILT_PRODUCTS_DIR" in os.environ:
+ dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
+ os.environ["WRAPPER_NAME"])
+ else:
+ dsym = os.environ["WRAPPER_NAME"]
+ else:
+ dsym = macho
+
+ dsym += ".dSYM"
+
+ return dsym
+
+# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
+# architectures and UUIDs specified by the uuids map.
+def make_fake_dsym(macho, dsym):
+ uuids = macho_uuids(macho)
+ if len(uuids) == 0:
+ return False
+
+ dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
+ dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
+ try:
+ os.makedirs(dwarf_dir)
+ except OSError, (err, error_string):
+ if err != errno.EEXIST:
+ raise
+ shutil.copyfile(macho, dwarf_file)
+
+ # info_template is the same as what dsymutil would have written, with the
+ # addition of the fake_dsym key.
+ info_template = \
+'''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+ <dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.apple.xcode.dsym.%(root_name)s</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>dSYM</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+ <key>dSYM_UUID</key>
+ <dict>
+%(uuid_dict)s </dict>
+ <key>fake_dsym</key>
+ <true/>
+ </dict>
+</plist>
+'''
+
+ root_name = os.path.basename(dsym)[:-5] # whatever.dSYM without .dSYM
+ uuid_dict = ""
+ for arch in sorted(uuids):
+ uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
+ "\t\t\t<string>" + uuids[arch] + "</string>\n"
+ info_dict = {
+ "root_name": root_name,
+ "uuid_dict": uuid_dict,
+ }
+ info_contents = info_template % info_dict
+ info_file = os.path.join(dsym, "Contents", "Info.plist")
+ info_fd = open(info_file, "w")
+ info_fd.write(info_contents)
+ info_fd.close()
+
+ return True
+
+# For a Mach-O file, determines where the .dSYM bundle should be located. If
+# the bundle does not exist or has a modification time older than the Mach-O
+# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
+# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
+# file to be identical.
+def strip_and_make_fake_dsym(macho):
+ dsym = dsym_path(macho)
+ macho_stat = os.stat(macho)
+ dsym_stat = None
+ try:
+ dsym_stat = os.stat(dsym)
+ except OSError, (err, error_string):
+ if err != errno.ENOENT:
+ raise
+
+ if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
+ # Make a .dSYM bundle
+ if not make_fake_dsym(macho, dsym):
+ return False
+
+ # Strip the Mach-O file
+ remove_dsym = True
+ try:
+ strip_path = ""
+ if "SYSTEM_DEVELOPER_BIN_DIR" in os.environ:
+ strip_path = os.environ["SYSTEM_DEVELOPER_BIN_DIR"]
+ else:
+ strip_path = "/usr/bin"
+ strip_path = os.path.join(strip_path, "strip")
+ strip_cmdline = [strip_path] + sys.argv[1:]
+ strip_cmd = subprocess.Popen(strip_cmdline)
+ if strip_cmd.wait() == 0:
+ remove_dsym = False
+ finally:
+ if remove_dsym:
+ shutil.rmtree(dsym)
+
+ # Update modification time on the Mach-O file and .dSYM bundle
+ now = time.time()
+ os.utime(macho, (now, now))
+ os.utime(dsym, (now, now))
+
+ return True
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ # This only supports operating on one file at a time. Look at the arguments
+ # to strip to figure out what the source to be stripped is. Arguments are
+ # processed in the same way that strip does, although to reduce complexity,
+ # this doesn't do all of the same checking as strip. For example, strip
+ # has no -Z switch and would treat -Z on the command line as an error. For
+ # the purposes this is needed for, that's fine.
+ macho = None
+ process_switches = True
+ ignore_argument = False
+ for arg in argv[1:]:
+ if ignore_argument:
+ ignore_argument = False
+ continue
+ if process_switches:
+ if arg == "-":
+ process_switches = False
+      # These strip switches accept an argument:
+ if arg in ["-s", "-R", "-d", "-o", "-arch"]:
+ ignore_argument = True
+ if arg[0] == "-":
+ continue
+ if macho is None:
+ macho = arg
+ else:
+ print >> sys.stderr, "Too many things to strip"
+ return 1
+
+ if macho is None:
+ print >> sys.stderr, "Nothing to strip"
+ return 1
+
+ if not strip_and_make_fake_dsym(macho):
+ return 1
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
diff --git a/media/webrtc/trunk/build/mac/tweak_info_plist.py b/media/webrtc/trunk/build/mac/tweak_info_plist.py
new file mode 100755
index 000000000..9b57e7da0
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/tweak_info_plist.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases,
+# this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+# we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+# the Info.plist has changed. So even if we updated it, it's only looking
+# at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place. This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+import optparse
+import os
+from os import environ as env
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.join(env['SRCROOT'], '..')
+
+sys.path.insert(0, os.path.join(TOP, "build/util"))
+import lastchange
+
+
+def _GetOutput(args):
+ """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+ of the process. stderr is attached to the parent."""
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ return (stdout, proc.returncode)
+
+
+def _GetOutputNoError(args):
+ """Similar to _GetOutput() but ignores stderr. If there's an error launching
+ the child (like file not found), the exception will be caught and (None, 1)
+ will be returned to mimic quiet failure."""
+ try:
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except OSError:
+ return (None, 1)
+ (stdout, stderr) = proc.communicate()
+ return (stdout, proc.returncode)
+
+
+def _RemoveKeys(plist, *keys):
+ """Removes a varargs of keys from the plist."""
+ for key in keys:
+ try:
+ del plist[key]
+ except KeyError:
+ pass
+
+
+def _AddVersionKeys(plist):
+ """Adds the product version number into the plist. Returns True on success and
+ False on error. The error will be printed to stderr."""
+ # Pull in the Chrome version number.
+ VERSION_TOOL = os.path.join(TOP, 'chrome/tools/build/version.py')
+ VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+
+ (stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+ '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
+ full_version = stdout.rstrip()
+
+ (stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+ '@BUILD@.@PATCH@'])
+ bundle_version = stdout.rstrip()
+
+ # If either of the two version commands finished with non-zero returncode,
+ # report the error up.
+ if retval1 or retval2:
+ return False
+
+ # Add public version info so "Get Info" works.
+ plist['CFBundleShortVersionString'] = full_version
+
+ # Honor the 429496.72.95 limit. The maximum comes from splitting 2^32 - 1
+ # into 6, 2, 2 digits. The limitation was present in Tiger, but it could
+ # have been fixed in later OS release, but hasn't been tested (it's easy
+  # enough to find out with "lsregister -dump").
+ # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+ # BUILD will always be an increasing value, so BUILD_PATH gives us something
+  # unique that meets what LS wants.
+ plist['CFBundleVersion'] = bundle_version
+
+ # Return with no error.
+ return True
+
+
+def _DoSCMKeys(plist, add_keys):
+ """Adds the SCM information, visible in about:version, to property list. If
+ |add_keys| is True, it will insert the keys, otherwise it will remove them."""
+ scm_path, scm_revision = None, None
+ if add_keys:
+ version_info = lastchange.FetchVersionInfo(
+ default_lastchange=None, directory=TOP)
+ scm_path, scm_revision = version_info.url, version_info.revision
+
+ # See if the operation failed.
+ _RemoveKeys(plist, 'SCMRevision')
+ if scm_revision != None:
+ plist['SCMRevision'] = scm_revision
+ elif add_keys:
+ print >>sys.stderr, 'Could not determine SCM revision. This may be OK.'
+
+ if scm_path != None:
+ plist['SCMPath'] = scm_path
+ else:
+ _RemoveKeys(plist, 'SCMPath')
+
+
+def _DoPDFKeys(plist, add_keys):
+ """Adds PDF support to the document types list. If add_keys is True, it will
+ add the type information dictionary. If it is False, it will remove it if
+ present."""
+
+ PDF_FILE_EXTENSION = 'pdf'
+
+ def __AddPDFKeys(sub_plist):
+ """Writes the keys into a sub-dictionary of the plist."""
+ sub_plist['CFBundleTypeExtensions'] = [PDF_FILE_EXTENSION]
+ sub_plist['CFBundleTypeIconFile'] = 'document.icns'
+ sub_plist['CFBundleTypeMIMETypes'] = 'application/pdf'
+ sub_plist['CFBundleTypeName'] = 'PDF Document'
+ sub_plist['CFBundleTypeRole'] = 'Viewer'
+
+ DOCUMENT_TYPES_KEY = 'CFBundleDocumentTypes'
+
+ # First get the list of document types, creating it if necessary.
+ try:
+ extensions = plist[DOCUMENT_TYPES_KEY]
+ except KeyError:
+ # If this plist doesn't have a type dictionary, create one if set to add the
+ # keys. If not, bail.
+ if not add_keys:
+ return
+ extensions = plist[DOCUMENT_TYPES_KEY] = []
+
+ # Loop over each entry in the list, looking for one that handles PDF types.
+ for i, ext in enumerate(extensions):
+ # If an entry for .pdf files is found...
+ if 'CFBundleTypeExtensions' not in ext:
+ continue
+ if PDF_FILE_EXTENSION in ext['CFBundleTypeExtensions']:
+ if add_keys:
+ # Overwrite the existing keys with new ones.
+ __AddPDFKeys(ext)
+ else:
+ # Otherwise, delete the entry entirely.
+ del extensions[i]
+ return
+
+ # No PDF entry exists. If one needs to be added, do so now.
+ if add_keys:
+ pdf_entry = {}
+ __AddPDFKeys(pdf_entry)
+ extensions.append(pdf_entry)
+
+
+def _AddBreakpadKeys(plist, branding):
+ """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+ also requires the |branding| argument."""
+ plist['BreakpadReportInterval'] = '3600' # Deliberately a string.
+ plist['BreakpadProduct'] = '%s_Mac' % branding
+ plist['BreakpadProductDisplay'] = branding
+ plist['BreakpadVersion'] = plist['CFBundleShortVersionString']
+ # These are both deliberately strings and not boolean.
+ plist['BreakpadSendAndExit'] = 'YES'
+ plist['BreakpadSkipConfirm'] = 'YES'
+
+
+def _RemoveBreakpadKeys(plist):
+ """Removes any set Breakpad keys."""
+ _RemoveKeys(plist,
+ 'BreakpadURL',
+ 'BreakpadReportInterval',
+ 'BreakpadProduct',
+ 'BreakpadProductDisplay',
+ 'BreakpadVersion',
+ 'BreakpadSendAndExit',
+ 'BreakpadSkipConfirm')
+
+
+def _AddKeystoneKeys(plist, bundle_identifier):
+ """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+ also requires the |bundle_identifier| argument (com.example.product)."""
+ plist['KSVersion'] = plist['CFBundleShortVersionString']
+ plist['KSProductID'] = bundle_identifier
+ plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+
+def _RemoveKeystoneKeys(plist):
+ """Removes any set Keystone keys."""
+ _RemoveKeys(plist,
+ 'KSVersion',
+ 'KSProductID',
+ 'KSUpdateURL')
+
+
+def Main(argv):
+ parser = optparse.OptionParser('%prog [options]')
+ parser.add_option('--breakpad', dest='use_breakpad', action='store',
+ type='int', default=False, help='Enable Breakpad [1 or 0]')
+ parser.add_option('--breakpad_uploads', dest='breakpad_uploads',
+ action='store', type='int', default=False,
+ help='Enable Breakpad\'s uploading of crash dumps [1 or 0]')
+ parser.add_option('--keystone', dest='use_keystone', action='store',
+ type='int', default=False, help='Enable Keystone [1 or 0]')
+ parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
+ default=True, help='Add SCM metadata [1 or 0]')
+ parser.add_option('--pdf', dest='add_pdf_support', action='store', type='int',
+ default=False, help='Add PDF file handler support [1 or 0]')
+ parser.add_option('--branding', dest='branding', action='store',
+ type='string', default=None, help='The branding of the binary')
+ parser.add_option('--bundle_id', dest='bundle_identifier',
+ action='store', type='string', default=None,
+ help='The bundle id of the binary')
+ (options, args) = parser.parse_args(argv)
+
+ if len(args) > 0:
+ print >>sys.stderr, parser.get_usage()
+ return 1
+
+ # Read the plist into its parsed format.
+ DEST_INFO_PLIST = os.path.join(env['TARGET_BUILD_DIR'], env['INFOPLIST_PATH'])
+ plist = plistlib.readPlist(DEST_INFO_PLIST)
+
+ # Insert the product version.
+ if not _AddVersionKeys(plist):
+ return 2
+
+ # Add Breakpad if configured to do so.
+ if options.use_breakpad:
+ if options.branding is None:
+ print >>sys.stderr, 'Use of Breakpad requires branding.'
+ return 1
+ _AddBreakpadKeys(plist, options.branding)
+ if options.breakpad_uploads:
+ plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+ else:
+ # This allows crash dumping to a file without uploading the
+ # dump, for testing purposes. Breakpad does not recognise
+ # "none" as a special value, but this does stop crash dump
+ # uploading from happening. We need to specify something
+ # because if "BreakpadURL" is not present, Breakpad will not
+ # register its crash handler and no crash dumping will occur.
+ plist['BreakpadURL'] = 'none'
+ else:
+ _RemoveBreakpadKeys(plist)
+
+ # Only add Keystone in Release builds.
+ if options.use_keystone and env['CONFIGURATION'] == 'Release':
+ if options.bundle_identifier is None:
+ print >>sys.stderr, 'Use of Keystone requires the bundle id.'
+ return 1
+ _AddKeystoneKeys(plist, options.bundle_identifier)
+ else:
+ _RemoveKeystoneKeys(plist)
+
+ # Adds or removes any SCM keys.
+ _DoSCMKeys(plist, options.add_scm_info)
+
+ # Adds or removes the PDF file handler entry.
+ _DoPDFKeys(plist, options.add_pdf_support)
+
+ # Now that all keys have been mutated, rewrite the file.
+ temp_info_plist = tempfile.NamedTemporaryFile()
+ plistlib.writePlist(plist, temp_info_plist.name)
+
+ # Info.plist will work perfectly well in any plist format, but traditionally
+ # applications use xml1 for this, so convert it to ensure that it's valid.
+ proc = subprocess.Popen(['plutil', '-convert', 'xml1', '-o', DEST_INFO_PLIST,
+ temp_info_plist.name])
+ proc.wait()
+ return proc.returncode
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/media/webrtc/trunk/build/mac/verify_no_objc.sh b/media/webrtc/trunk/build/mac/verify_no_objc.sh
new file mode 100755
index 000000000..955f9beff
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/verify_no_objc.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script makes sure that no __OBJC,__image_info section appears in the
+# executable file built by the Xcode target that runs the script. If such a
+# section appears, the script prints an error message and exits nonzero.
+#
+# Why is this important?
+#
+# On 10.5, there's a bug in CFBundlePreflightExecutable that causes it to
+# crash when operating in an executable that has not loaded at its default
+# address (that is, when it's a position-independent executable with the
+# MH_PIE bit set in its mach_header) and the executable has an
+# __OBJC,__image_info section. See http://crbug.com/88697.
+#
+# Chrome's main executables don't use any Objective-C at all, and don't need
+# to carry this section around. Not linking them as Objective-C when they
+# don't need it anyway saves about 4kB in the linked executable, although most
+# of that 4kB is just filled with zeroes.
+#
+# This script makes sure that nobody goofs and accidentally introduces these
+# sections into the main executables.
+
+set -eu
+
+otool="${DEVELOPER_BIN_DIR:-/usr/bin}/otool"
+executable="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if "${otool}" -arch i386 -o "${executable}" | grep -q '^Contents.*section$'; \
+then
+ echo "${0}: ${executable} has an __OBJC,__image_info section" 2>&1
+ exit 1
+fi
+
+if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
+ echo "${0}: otool failed" 2>&1
+ exit 1
+fi
+
+exit 0
diff --git a/media/webrtc/trunk/build/nocompile.gypi b/media/webrtc/trunk/build/nocompile.gypi
new file mode 100644
index 000000000..f9021ae37
--- /dev/null
+++ b/media/webrtc/trunk/build/nocompile.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests. A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+# http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'my_module_nc_unittests',
+# 'type': 'executable',
+# 'sources': [
+# 'nc_testset_1.nc',
+# 'nc_testset_2.nc',
+# ],
+# 'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile. Each individual test case in the file should be put in its own
+# #ifdef section. The expected output should be appended with a C++-style
+# comment that has a python list of regular expressions. This will likely
+# be greater than 80-characters. Giving a solid expected output test is
+# important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+# #if defined(TEST_NEEDS_SEMICOLON) // [r"expected ',' or ';' at end of input"]
+#
+# int a = 1
+#
+# #elif defined(TEST_NEEDS_CAST) // [r"invalid conversion from 'void*' to 'char*'"]
+#
+# void* a = NULL;
+# char* b = a;
+#
+# #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+# DISABLE_TEST_NEEDS_SEMICOLON
+# TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that is empty on success, or will have a
+# series of #error lines on failure, and a set of trivially passing gunit
+# TEST() functions on success. This allows us to fail at the compile step when
+# something goes wrong, and know during the unittest run that the test was at
+# least processed when things go right.
+
+{
+ # TODO(awong): Disabled until http://crbug.com/105388 is resolved.
+ 'sources/': [['exclude', '\\.nc$']],
+ 'conditions': [
+ [ 'OS=="linux" and clang==0', {
+ 'rules': [
+ {
+ 'variables': {
+ 'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py',
+ 'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/'
+ '<(RULE_INPUT_ROOT)_nc.cc'),
+ },
+ 'rule_name': 'run_nocompile',
+ 'extension': 'nc',
+ 'inputs': [
+ '<(nocompile_driver)',
+ ],
+ 'outputs': [
+ '<(nc_result_path)'
+ ],
+ 'action': [
+ 'python',
+ '<(nocompile_driver)',
+ '4', # number of compilers to invoke in parallel.
+ '<(RULE_INPUT_PATH)',
+ '-Wall -Werror -Wfatal-errors -I<(DEPTH)',
+ '<(nc_result_path)',
+ ],
+ 'message': 'Generating no compile results for <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ }, {
+ 'sources/': [['exclude', '\\.nc$']]
+    }], # 'OS=="linux" and clang==0'
+ ],
+}
+
diff --git a/media/webrtc/trunk/build/output_dll_copy.rules b/media/webrtc/trunk/build/output_dll_copy.rules
new file mode 100644
index 000000000..c6e905131
--- /dev/null
+++ b/media/webrtc/trunk/build/output_dll_copy.rules
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<VisualStudioToolFile
+ Name="Output DLL copy"
+ Version="8.00"
+ >
+ <Rules>
+ <CustomBuildRule
+ Name="Output DLL copy"
+ CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
+ Outputs="$(OutDir)\$(InputFileName)"
+ FileExtensions="*.dll"
+ >
+ <Properties>
+ </Properties>
+ </CustomBuildRule>
+ </Rules>
+</VisualStudioToolFile>
diff --git a/media/webrtc/trunk/build/precompile.cc b/media/webrtc/trunk/build/precompile.cc
new file mode 100644
index 000000000..db1ef6dfe
--- /dev/null
+++ b/media/webrtc/trunk/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/media/webrtc/trunk/build/precompile.h b/media/webrtc/trunk/build/precompile.h
new file mode 100644
index 000000000..a3c5193c6
--- /dev/null
+++ b/media/webrtc/trunk/build/precompile.h
@@ -0,0 +1,108 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header for Chromium project on Windows, not used by
+// other build configurations. Using precompiled headers speeds the
+// build up significantly, around 1/4th on VS 2010 on an HP Z600 with 12
+// GB of memory.
+//
+// Numeric comments beside includes are the number of times they were
+// included under src/chrome/browser on 2011/8/20, which was used as a
+// baseline for deciding what to include in the PCH. Includes without
+// a numeric comment are generally included at least 5 times. It may
+// be possible to tweak the speed of the build by commenting out or
+// removing some of the less frequently used headers.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+// The Windows header needs to come before almost all the other
+// Windows-specific headers.
+#include <Windows.h>
+#include <dwmapi.h>
+#include <shellapi.h>
+#include <wincrypt.h> // 4
+#include <wtypes.h> // 2
+
+// Defines in atlbase.h cause conflicts; if we could figure out how
+// this family of headers can be included in the PCH, it might speed
+// up the build as several of them are used frequently.
+/*
+#include <atlbase.h>
+#include <atlapp.h>
+#include <atlcom.h>
+#include <atlcrack.h> // 2
+#include <atlctrls.h> // 2
+#include <atlmisc.h> // 2
+#include <atlsafe.h> // 1
+#include <atltheme.h> // 1
+#include <atlwin.h> // 2
+*/
+
+// Objbase.h and other files that rely on it bring in [ #define
+// interface struct ] which can cause problems in a multi-platform
+// build like Chrome's. #undef-ing it does not work as there are
+// currently 118 targets that break if we do this, so leaving out of
+// the precompiled header for now.
+//#include <commctrl.h> // 2
+//#include <commdlg.h> // 3
+//#include <cryptuiapi.h> // 2
+//#include <Objbase.h> // 2
+//#include <objidl.h> // 1
+//#include <ole2.h> // 1
+//#include <oleacc.h> // 2
+//#include <oleauto.h> // 1
+//#include <oleidl.h> // 1
+//#include <propkey.h> // 2
+//#include <propvarutil.h> // 2
+//#include <pstore.h> // 2
+//#include <shlguid.h> // 1
+//#include <shlwapi.h> // 1
+//#include <shobjidl.h> // 4
+//#include <urlhist.h> // 2
+
+// Caused other conflicts in addition to the 'interface' issue above.
+// #include <shlobj.h>
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h> // 4
+#include <math.h>
+#include <memory.h> // 1
+#include <signal.h>
+#include <stdarg.h> // 1
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h> // 4
+
+#include <algorithm>
+#include <bitset> // 3
+#include <cmath>
+#include <cstddef>
+#include <cstdio> // 3
+#include <cstdlib> // 2
+#include <cstring>
+#include <deque>
+#include <fstream> // 3
+#include <functional>
+#include <iomanip> // 2
+#include <iosfwd> // 2
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric> // 2
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
diff --git a/media/webrtc/trunk/build/protoc.gypi b/media/webrtc/trunk/build/protoc.gypi
new file mode 100644
index 000000000..897e446e8
--- /dev/null
+++ b/media/webrtc/trunk/build/protoc.gypi
@@ -0,0 +1,116 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'my_proto_lib',
+# 'type': 'static_library',
+# 'sources': [
+# 'foo.proto',
+# 'bar.proto',
+# ],
+# 'variables': {
+# # Optional, see below: 'proto_in_dir': '.'
+# 'proto_out_dir': 'dir/for/my_proto_lib'
+# },
+# 'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list or other gyp
+# dependencies. The proto headers are guaranteed to be generated before any
+# source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files. If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under. Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+# #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# If you need to add an EXPORT macro to a protobuf's c++ header, set the
+# 'cc_generator_options' variable with the value: 'dllexport_decl=FOO_EXPORT:'
+# e.g. 'dllexport_decl=BASE_EXPORT:'
+#
+# It is likely you also need to #include a file for the above EXPORT macro to
+# work. You can do so with the 'cc_include' variable.
+# e.g. 'base/base_export.h'
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+# <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+# <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+ 'variables': {
+ 'protoc_wrapper': '<(DEPTH)/tools/protoc_wrapper/protoc_wrapper.py',
+ 'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+ 'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+ 'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+ 'cc_generator_options%': '',
+ 'cc_include%': '',
+ 'proto_in_dir%': '.',
+ },
+ 'rules': [
+ {
+ 'rule_name': 'genproto',
+ 'extension': 'proto',
+ 'inputs': [
+ '<(protoc_wrapper)',
+ '<(protoc)',
+ ],
+ 'outputs': [
+ '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+ '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+ '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+ ],
+ 'action': [
+ 'python',
+ '<(protoc_wrapper)',
+ '--include',
+ '<(cc_include)',
+ '--protobuf',
+ '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+ '--',
+ '<(protoc)',
+ # Using the --arg val form (instead of --arg=val) allows gyp's msvs rule
+ # generation to correct 'val' which is a path.
+ '--proto_path','<(proto_in_dir)',
+ # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+ # --proto_path is a strict prefix of the path given as an argument.
+ '<(proto_in_dir)/<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+ '--cpp_out', '<(cc_generator_options)<(cc_dir)',
+ '--python_out', '<(py_dir)',
+ ],
+ 'msvs_cygwin_shell': 0,
+ 'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ },
+ ],
+ 'dependencies': [
+ '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+ '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+ '<(DEPTH)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+ '<(DEPTH)',
+ ]
+ },
+ 'export_dependent_settings': [
+ # The generated headers reference headers within protobuf_lite,
+ # so dependencies must be able to find those headers too.
+ '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+ ],
+ # This target exports a hard dependency because it generates header
+ # files.
+ 'hard_dependency': 1,
+}
diff --git a/media/webrtc/trunk/build/release.gypi b/media/webrtc/trunk/build/release.gypi
new file mode 100644
index 000000000..7595ef5a2
--- /dev/null
+++ b/media/webrtc/trunk/build/release.gypi
@@ -0,0 +1,17 @@
+{
+ 'conditions': [
+ # Handle build types.
+ ['buildtype=="Dev"', {
+ 'includes': ['internal/release_impl.gypi'],
+ }],
+ ['buildtype=="Official"', {
+ 'includes': ['internal/release_impl_official.gypi'],
+ }],
+ # TODO(bradnelson): may also need:
+ # checksenabled
+ # coverage
+ # dom_stats
+ # pgo_instrument
+ # pgo_optimize
+ ],
+}
diff --git a/media/webrtc/trunk/build/sanitize-mac-build-log.sed b/media/webrtc/trunk/build/sanitize-mac-build-log.sed
new file mode 100755
index 000000000..3312eac5a
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitize-mac-build-log.sed
@@ -0,0 +1,35 @@
+#!/bin/echo Use sanitize-mac-build-log.sh or sed -f
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^ setenv /d
+/^ cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line. These deletions drop the command line.
+\|^ /Developer/usr/bin/|d
+\|^ /Developer/Library/PrivateFrameworks/DevToolsCore.framework/|d
+\|^ /Developer/Library/Xcode/Plug-ins/CoreBuildTasks.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^ .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^ /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2| CC \1|
+s|^CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2| CC \1|
diff --git a/media/webrtc/trunk/build/sanitize-mac-build-log.sh b/media/webrtc/trunk/build/sanitize-mac-build-log.sh
new file mode 100755
index 000000000..dc743fabb
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitize-mac-build-log.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -f `dirname "${0}"`/`basename "${0}" sh`sed
+
diff --git a/media/webrtc/trunk/build/sanitize-win-build-log.sed b/media/webrtc/trunk/build/sanitize-win-build-log.sed
new file mode 100755
index 000000000..c8bffde87
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitize-win-build-log.sed
@@ -0,0 +1,17 @@
+#!/bin/echo Use sanitize-win-build-log.sh or sed -f
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]\+>//
+
+# Shorten bindings generation lines
+s/^.*"perl".*generate-bindings.pl".*\("[^"]\+\.idl"\).*$/ generate-bindings \1/
diff --git a/media/webrtc/trunk/build/sanitize-win-build-log.sh b/media/webrtc/trunk/build/sanitize-win-build-log.sh
new file mode 100755
index 000000000..dc743fabb
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitize-win-build-log.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -f `dirname "${0}"`/`basename "${0}" sh`sed
+
diff --git a/media/webrtc/trunk/build/some.gyp b/media/webrtc/trunk/build/some.gyp
new file mode 100644
index 000000000..44a1dd59e
--- /dev/null
+++ b/media/webrtc/trunk/build/some.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'some',
+ 'type': 'none',
+ 'dependencies': [
+ # This file is intended to be locally modified. List the targets you use
+ # regularly. The generated some.sln will contain projects for only
+ # those targets and the targets they are transitively dependent on. This
+ # can result in a solution that loads and unloads faster in Visual
+ # Studio.
+ #
+ # Tip: Create a dummy CL to hold your local edits to this file, so they
+ # don't accidentally get added to another CL that you are editing.
+ #
+ # Example:
+ # '../chrome/chrome.gyp:chrome',
+ ],
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/temp_gyp/README.chromium b/media/webrtc/trunk/build/temp_gyp/README.chromium
new file mode 100644
index 000000000..8045d6159
--- /dev/null
+++ b/media/webrtc/trunk/build/temp_gyp/README.chromium
@@ -0,0 +1,3 @@
+This directory will be removed once the files in it are committed upstream and
+Chromium imports an upstream revision with these files. Contact mark for
+details.
diff --git a/media/webrtc/trunk/build/temp_gyp/googleurl.gyp b/media/webrtc/trunk/build/temp_gyp/googleurl.gyp
new file mode 100644
index 000000000..1bd9a7ad3
--- /dev/null
+++ b/media/webrtc/trunk/build/temp_gyp/googleurl.gyp
@@ -0,0 +1,105 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(mark): Upstream this file to googleurl.
+{
+ 'variables': {
+ 'chromium_code': 1,
+ },
+ 'targets': [
+ {
+ 'target_name': 'googleurl',
+ 'type': '<(component)',
+ 'dependencies': [
+ '../../base/base.gyp:base',
+ '../../third_party/icu/icu.gyp:icudata',
+ '../../third_party/icu/icu.gyp:icui18n',
+ '../../third_party/icu/icu.gyp:icuuc',
+ ],
+ 'sources': [
+ '../../googleurl/src/gurl.cc',
+ '../../googleurl/src/gurl.h',
+ '../../googleurl/src/url_canon.h',
+ '../../googleurl/src/url_canon_etc.cc',
+ '../../googleurl/src/url_canon_fileurl.cc',
+ '../../googleurl/src/url_canon_filesystemurl.cc',
+ '../../googleurl/src/url_canon_host.cc',
+ '../../googleurl/src/url_canon_icu.cc',
+ '../../googleurl/src/url_canon_icu.h',
+ '../../googleurl/src/url_canon_internal.cc',
+ '../../googleurl/src/url_canon_internal.h',
+ '../../googleurl/src/url_canon_internal_file.h',
+ '../../googleurl/src/url_canon_ip.cc',
+ '../../googleurl/src/url_canon_ip.h',
+ '../../googleurl/src/url_canon_mailtourl.cc',
+ '../../googleurl/src/url_canon_path.cc',
+ '../../googleurl/src/url_canon_pathurl.cc',
+ '../../googleurl/src/url_canon_query.cc',
+ '../../googleurl/src/url_canon_relative.cc',
+ '../../googleurl/src/url_canon_stdstring.h',
+ '../../googleurl/src/url_canon_stdurl.cc',
+ '../../googleurl/src/url_file.h',
+ '../../googleurl/src/url_parse.cc',
+ '../../googleurl/src/url_parse.h',
+ '../../googleurl/src/url_parse_file.cc',
+ '../../googleurl/src/url_parse_internal.h',
+ '../../googleurl/src/url_util.cc',
+ '../../googleurl/src/url_util.h',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '../..',
+ ],
+ },
+ 'defines': [
+ 'FULL_FILESYSTEM_URL_SUPPORT=1',
+ ],
+ 'conditions': [
+ ['component=="shared_library"', {
+ 'defines': [
+ 'GURL_DLL',
+ 'GURL_IMPLEMENTATION=1',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'GURL_DLL',
+ ],
+ },
+ }],
+ ],
+ },
+ {
+ 'target_name': 'googleurl_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'googleurl',
+ '../../base/base.gyp:base_i18n',
+ '../../base/base.gyp:run_all_unittests',
+ '../../testing/gtest.gyp:gtest',
+ '../../third_party/icu/icu.gyp:icuuc',
+ ],
+ 'sources': [
+ '../../googleurl/src/gurl_unittest.cc',
+ '../../googleurl/src/url_canon_unittest.cc',
+ '../../googleurl/src/url_parse_unittest.cc',
+ '../../googleurl/src/url_test_utils.h',
+ '../../googleurl/src/url_util_unittest.cc',
+ ],
+ 'defines': [
+ 'FULL_FILESYSTEM_URL_SUPPORT=1',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!="mac" and OS!="ios"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/temp_gyp/pdfsqueeze.gyp b/media/webrtc/trunk/build/temp_gyp/pdfsqueeze.gyp
new file mode 100644
index 000000000..2b3b1ff81
--- /dev/null
+++ b/media/webrtc/trunk/build/temp_gyp/pdfsqueeze.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'pdfsqueeze',
+ 'type': 'executable',
+ 'sources': [
+ '../../third_party/pdfsqueeze/pdfsqueeze.m',
+ ],
+ 'defines': [
+ # Use defines to map the full path names that will be used for
+ # the vars into the short forms expected by pdfsqueeze.m.
+ '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter=ApplyGenericRGB_qfilter',
+ '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter_len=ApplyGenericRGB_qfilter_len',
+ ],
+ 'include_dirs': [
+ '<(INTERMEDIATE_DIR)',
+ ],
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/Quartz.framework',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'Generate inline filter data',
+ 'inputs': [
+ '../../third_party/pdfsqueeze/ApplyGenericRGB.qfilter',
+ ],
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/ApplyGenericRGB.h',
+ ],
+ 'action': ['xxd', '-i', '<@(_inputs)', '<@(_outputs)'],
+ },
+ ],
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/update-linux-sandbox.sh b/media/webrtc/trunk/build/update-linux-sandbox.sh
new file mode 100755
index 000000000..ebf8c105a
--- /dev/null
+++ b/media/webrtc/trunk/build/update-linux-sandbox.sh
@@ -0,0 +1,75 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/out/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+ echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+ exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+ echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+ exit 1
+fi
+
+installsandbox() {
+ echo "(using sudo so you may be asked for your password)"
+ sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+ "${CHROME_SANDBOX_INST_PATH}" &&
+ sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+ sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+ return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+ echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+ echo "If you are building in Release mode"
+ exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+ echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, "
+ echo "please make sure you build the chrome_sandbox target"
+ exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+ echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+ echo "installing it now."
+ installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+ echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+ exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+ echo "Your installed setuid sandbox is too old, installing it now."
+ if ! installsandbox; then
+ echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+ exit 1
+ fi
+else
+ echo "Your setuid sandbox is up to date"
+ if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+ echo -n "Make sure you have \"export "
+ echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+ echo "somewhere in your .bashrc"
+ echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+ fi
+fi
diff --git a/media/webrtc/trunk/build/util/lastchange.py b/media/webrtc/trunk/build/util/lastchange.py
new file mode 100755
index 000000000..a101341ef
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lastchange.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+
+import re
+import optparse
+import os
+import subprocess
+import sys
+
+_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
+
+class VersionInfo(object):
+ def __init__(self, url, revision):
+ self.url = url
+ self.revision = revision
+
+
+def FetchSVNRevision(directory, svn_url_regex):
+ """
+ Fetch the Subversion branch and revision for a given directory.
+
+ Errors are swallowed.
+
+ Returns:
+ A VersionInfo object or None on error.
+ """
+ try:
+ proc = subprocess.Popen(['svn', 'info'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=directory,
+ shell=(sys.platform=='win32'))
+ except OSError:
+ # command is apparently either not installed or not executable.
+ return None
+ if not proc:
+ return None
+
+ attrs = {}
+ for line in proc.stdout:
+ line = line.strip()
+ if not line:
+ continue
+ key, val = line.split(': ', 1)
+ attrs[key] = val
+
+ try:
+ match = svn_url_regex.search(attrs['URL'])
+ if match:
+ url = match.group(2)
+ else:
+ url = ''
+ revision = attrs['Revision']
+ except KeyError:
+ return None
+
+ return VersionInfo(url, revision)
+
+
+def RunGitCommand(directory, command):
+ """
+ Launches git subcommand.
+
+ Errors are swallowed.
+
+ Returns:
+ A process object or None.
+ """
+ command = ['git'] + command
+ # Force shell usage under cygwin. This is a workaround for
+ # mysterious loss of cwd while invoking cygwin's git.
+ # We can't just pass shell=True to Popen, as under win32 this will
+ # cause CMD to be used, while we explicitly want a cygwin shell.
+ if sys.platform == 'cygwin':
+ command = ['sh', '-c', ' '.join(command)]
+ try:
+ proc = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=directory,
+ shell=(sys.platform=='win32'))
+ return proc
+ except OSError:
+ return None
+
+
+def FetchGitRevision(directory):
+ """
+ Fetch the Git hash for a given directory.
+
+ Errors are swallowed.
+
+ Returns:
+ A VersionInfo object or None on error.
+ """
+ proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
+ if proc:
+ output = proc.communicate()[0].strip()
+ if proc.returncode == 0 and output:
+ return VersionInfo('git', output[:7])
+ return None
+
+
+def FetchGitSVNURLAndRevision(directory, svn_url_regex):
+ """
+ Fetch the Subversion URL and revision through Git.
+
+ Errors are swallowed.
+
+ Returns:
+ A tuple containing the Subversion URL and revision.
+ """
+ proc = RunGitCommand(directory, ['log', '-1',
+ '--grep=git-svn-id', '--format=%b'])
+ if proc:
+ output = proc.communicate()[0].strip()
+ if proc.returncode == 0 and output:
+ # Extract the latest SVN revision and the SVN URL.
+ # The target line is the last "git-svn-id: ..." line like this:
+ # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
+ match = _GIT_SVN_ID_REGEX.search(output)
+ if match:
+ revision = match.group(2)
+ url_match = svn_url_regex.search(match.group(1))
+ if url_match:
+ url = url_match.group(2)
+ else:
+ url = ''
+ return url, revision
+ return None, None
+
+
+def FetchGitSVNRevision(directory, svn_url_regex):
+ """
+ Fetch the Git-SVN identifier for the local tree.
+
+ Errors are swallowed.
+ """
+ url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex)
+ if url and revision:
+ return VersionInfo(url, revision)
+ return None
+
+
+def FetchVersionInfo(default_lastchange, directory=None,
+ directory_regex_prior_to_src_url='chrome|svn'):
+ """
+ Returns the last change (in the form of a branch, revision tuple),
+ from some appropriate revision control system.
+ """
+ svn_url_regex = re.compile(
+ r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
+
+ version_info = (FetchSVNRevision(directory, svn_url_regex) or
+ FetchGitSVNRevision(directory, svn_url_regex) or
+ FetchGitRevision(directory))
+ if not version_info:
+ if default_lastchange and os.path.exists(default_lastchange):
+ revision = open(default_lastchange, 'r').read().strip()
+ version_info = VersionInfo(None, revision)
+ else:
+ version_info = VersionInfo(None, None)
+ return version_info
+
+
+def WriteIfChanged(file_name, contents):
+ """
+ Writes the specified contents to the specified file_name
+ iff the contents are different than the current contents.
+ """
+ try:
+ old_contents = open(file_name, 'r').read()
+ except EnvironmentError:
+ pass
+ else:
+ if contents == old_contents:
+ return
+ os.unlink(file_name)
+ open(file_name, 'w').write(contents)
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ parser = optparse.OptionParser(usage="lastchange.py [options]")
+ parser.add_option("-d", "--default-lastchange", metavar="FILE",
+ help="default last change input FILE")
+ parser.add_option("-o", "--output", metavar="FILE",
+ help="write last change to FILE")
+ parser.add_option("--revision-only", action='store_true',
+ help="just print the SVN revision number")
+ opts, args = parser.parse_args(argv[1:])
+
+ out_file = opts.output
+
+ while len(args) and out_file is None:
+ if out_file is None:
+ out_file = args.pop(0)
+ if args:
+ sys.stderr.write('Unexpected arguments: %r\n\n' % args)
+ parser.print_help()
+ sys.exit(2)
+
+ version_info = FetchVersionInfo(opts.default_lastchange,
+ os.path.dirname(sys.argv[0]))
+
+ if version_info.revision == None:
+ version_info.revision = '0'
+
+ if opts.revision_only:
+ print version_info.revision
+ else:
+ contents = "LASTCHANGE=%s\n" % version_info.revision
+ if out_file:
+ WriteIfChanged(out_file, contents)
+ else:
+ sys.stdout.write(contents)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/whitespace_file.txt b/media/webrtc/trunk/build/whitespace_file.txt
new file mode 100644
index 000000000..087f0ad75
--- /dev/null
+++ b/media/webrtc/trunk/build/whitespace_file.txt
@@ -0,0 +1,69 @@
+Copyright (c) 2012 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+=========================================================================
+
+Let's make a story. Add one sentence for every commit:
+
+CHAPTER 1.0:
+It was a dark and stormy night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the darkness. A hooded figure emerged.
+
+It was a Domo-Kun. "What took you so long?", inquired his wife.
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt", he snickered.
+
+The pause was filled with the sound of thunder.
+
+
+CHAPTER 2.0:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it." A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3.0:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed him so far.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred, just the week before.
+
+Next time, there won't be any sushi. Why sushis with waffles anyway? It's like
+salmon in a cereal bowl.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from. He tries to recall the cook's face. Purple?
+
+CHAPTER 5:
+Many years later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun.
+Another day in the life...
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+
+This commit will change the world as we know it.
diff --git a/media/webrtc/trunk/build/win/chrome_win.croc b/media/webrtc/trunk/build/win/chrome_win.croc
new file mode 100644
index 000000000..e1e3bb76d
--- /dev/null
+++ b/media/webrtc/trunk/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+ # List of rules, applied in order
+ 'rules' : [
+ # Specify inclusions before exclusions, since rules are in order.
+
+ # Don't include chromeos, posix, or linux specific files
+ {
+ 'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+ 'include' : 0,
+ },
+ # Don't include ChromeOS dirs
+ {
+ 'regexp' : '.*/chromeos/',
+ 'include' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '.*_test_win\\.',
+ 'group' : 'test',
+ },
+ ],
+}
diff --git a/media/webrtc/trunk/build/win/install-build-deps.py b/media/webrtc/trunk/build/win/install-build-deps.py
new file mode 100755
index 000000000..d9e50b6e7
--- /dev/null
+++ b/media/webrtc/trunk/build/win/install-build-deps.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil
+import sys
+import os
+
+def patch_msbuild():
+ """VS2010 MSBuild has a ULDI bug that we patch here. See http://goo.gl/Pn8tj.
+ """
+ source_path = os.path.join(os.environ['ProgramFiles(x86)'],
+ "MSBuild",
+ "Microsoft.Cpp",
+ "v4.0",
+ "Microsoft.CppBuild.targets")
+ backup_path = source_path + ".backup"
+ if not os.path.exists(backup_path):
+ try:
+ print "Backing up %s..." % source_path
+ shutil.copyfile(source_path, backup_path)
+ except IOError:
+ print "Could not back up %s to %s. Run as Administrator?" % (
+ source_path, backup_path)
+ return 1
+
+ source = open(source_path).read()
+ base = ('''<Target Name="GetResolvedLinkObjs" Returns="@(ObjFullPath)" '''
+ '''DependsOnTargets="$(CommonBuildOnlyTargets);ComputeCLOutputs;'''
+ '''ResolvedLinkObjs"''')
+ find = base + '>'
+ replace = base + ''' Condition="'$(ConfigurationType)'=='StaticLibrary'">'''
+ result = source.replace(find, replace)
+
+ if result != source:
+ open(source_path, "w").write(result)
+ print "Patched %s." % source_path
+ return 0
+
+
+def main():
+ return patch_msbuild()
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/win/setup_cygwin_mount.py b/media/webrtc/trunk/build/win/setup_cygwin_mount.py
new file mode 100644
index 000000000..d68a3af41
--- /dev/null
+++ b/media/webrtc/trunk/build/win/setup_cygwin_mount.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def main():
+ if len(sys.argv) != 2 or sys.argv[1] != '--win-only':
+ return 1
+ if sys.platform in ('win32', 'cygwin'):
+ self_dir = os.path.dirname(sys.argv[0])
+ mount_path = os.path.join(self_dir, "../../third_party/cygwin")
+ batch_path = os.path.join(mount_path, "setup_mount.bat")
+ return os.system(os.path.normpath(batch_path) + ">nul")
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/media/webrtc/trunk/build/win_precompile.gypi b/media/webrtc/trunk/build/win_precompile.gypi
new file mode 100644
index 000000000..fb8607666
--- /dev/null
+++ b/media/webrtc/trunk/build/win_precompile.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default
+# precompiled header on Windows, in debug builds only as the official
+# builders blow up (out of memory) if precompiled headers are used for
+# release builds.
+
+{
+ 'conditions': [
+ ['OS=="win" and chromium_win_pch==1', {
+ 'target_defaults': {
+ 'msvs_precompiled_header': '<(DEPTH)/build/precompile.h',
+ 'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+ 'sources': ['<(DEPTH)/build/precompile.cc'],
+ }
+ }],
+ ],
+}