Diffstat (limited to 'testing/mozbase')
-rw-r--r--testing/mozbase/README.md21
-rw-r--r--testing/mozbase/docs/Makefile153
-rw-r--r--testing/mozbase/docs/_static/structured_example.py102
-rw-r--r--testing/mozbase/docs/conf.py258
-rw-r--r--testing/mozbase/docs/devicemanagement.rst11
-rw-r--r--testing/mozbase/docs/gettinginfo.rst13
-rw-r--r--testing/mozbase/docs/index.rst57
-rw-r--r--testing/mozbase/docs/loggingreporting.rst11
-rw-r--r--testing/mozbase/docs/make.bat190
-rw-r--r--testing/mozbase/docs/manifestparser.rst558
-rw-r--r--testing/mozbase/docs/mozcrash.rst8
-rw-r--r--testing/mozbase/docs/mozdebug.rst5
-rw-r--r--testing/mozbase/docs/mozdevice.rst254
-rw-r--r--testing/mozbase/docs/mozfile.rst10
-rw-r--r--testing/mozbase/docs/mozhttpd.rst22
-rw-r--r--testing/mozbase/docs/mozinfo.rst71
-rw-r--r--testing/mozbase/docs/mozinstall.rst29
-rw-r--r--testing/mozbase/docs/mozlog.rst486
-rw-r--r--testing/mozbase/docs/moznetwork.rst9
-rw-r--r--testing/mozbase/docs/mozprocess.rst324
-rw-r--r--testing/mozbase/docs/mozprofile.rst99
-rw-r--r--testing/mozbase/docs/mozrunner.rst177
-rw-r--r--testing/mozbase/docs/mozversion.rst112
-rw-r--r--testing/mozbase/docs/requirements.txt1
-rw-r--r--testing/mozbase/docs/setuprunning.rst18
-rw-r--r--testing/mozbase/manifestparser/manifestparser/__init__.py8
-rw-r--r--testing/mozbase/manifestparser/manifestparser/cli.py246
-rw-r--r--testing/mozbase/manifestparser/manifestparser/expression.py324
-rw-r--r--testing/mozbase/manifestparser/manifestparser/filters.py421
-rw-r--r--testing/mozbase/manifestparser/manifestparser/ini.py142
-rw-r--r--testing/mozbase/manifestparser/manifestparser/manifestparser.py804
-rw-r--r--testing/mozbase/manifestparser/setup.py27
-rw-r--r--testing/mozbase/manifestparser/tests/comment-example.ini11
-rw-r--r--testing/mozbase/manifestparser/tests/default-skipif.ini22
-rw-r--r--testing/mozbase/manifestparser/tests/default-suppfiles.ini9
-rw-r--r--testing/mozbase/manifestparser/tests/filter-example.ini11
-rw-r--r--testing/mozbase/manifestparser/tests/fleem1
-rw-r--r--testing/mozbase/manifestparser/tests/include-example.ini11
-rw-r--r--testing/mozbase/manifestparser/tests/include-invalid.ini1
-rw-r--r--testing/mozbase/manifestparser/tests/include/bar.ini4
-rw-r--r--testing/mozbase/manifestparser/tests/include/crash-handling1
-rw-r--r--testing/mozbase/manifestparser/tests/include/flowers1
-rw-r--r--testing/mozbase/manifestparser/tests/include/foo.ini5
-rw-r--r--testing/mozbase/manifestparser/tests/just-defaults.ini2
-rw-r--r--testing/mozbase/manifestparser/tests/manifest.ini11
-rw-r--r--testing/mozbase/manifestparser/tests/missing-path.ini2
-rw-r--r--testing/mozbase/manifestparser/tests/mozmill-example.ini80
-rw-r--r--testing/mozbase/manifestparser/tests/mozmill-restart-example.ini26
-rw-r--r--testing/mozbase/manifestparser/tests/no-tests.ini2
-rw-r--r--testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/parent/include/manifest.ini8
-rw-r--r--testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini5
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_1_server-root.ini5
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2_server-root.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini6
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_server-root.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_31
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_21
-rw-r--r--testing/mozbase/manifestparser/tests/parent/level_1/test_11
-rw-r--r--testing/mozbase/manifestparser/tests/parent/root/dummy0
-rw-r--r--testing/mozbase/manifestparser/tests/path-example.ini2
-rw-r--r--testing/mozbase/manifestparser/tests/relative-path.ini5
-rw-r--r--testing/mozbase/manifestparser/tests/subsuite.ini13
-rw-r--r--testing/mozbase/manifestparser/tests/test_chunking.py302
-rwxr-xr-xtesting/mozbase/manifestparser/tests/test_convert_directory.py181
-rwxr-xr-xtesting/mozbase/manifestparser/tests/test_convert_symlinks.py139
-rwxr-xr-xtesting/mozbase/manifestparser/tests/test_default_overrides.py115
-rwxr-xr-xtesting/mozbase/manifestparser/tests/test_expressionparser.py152
-rw-r--r--testing/mozbase/manifestparser/tests/test_filters.py182
-rwxr-xr-xtesting/mozbase/manifestparser/tests/test_manifestparser.py325
-rwxr-xr-xtesting/mozbase/manifestparser/tests/test_read_ini.py70
-rw-r--r--testing/mozbase/manifestparser/tests/test_testmanifest.py122
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini1
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js1
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js1
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js1
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js1
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini4
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini3
-rw-r--r--testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini5
-rw-r--r--testing/mozbase/moz.build38
-rw-r--r--testing/mozbase/mozcrash/mozcrash/__init__.py10
-rw-r--r--testing/mozbase/mozcrash/mozcrash/mozcrash.py557
-rw-r--r--testing/mozbase/mozcrash/setup.py29
-rw-r--r--testing/mozbase/mozcrash/tests/manifest.ini1
-rw-r--r--testing/mozbase/mozcrash/tests/test.py241
-rw-r--r--testing/mozbase/mozdebug/mozdebug/__init__.py31
-rwxr-xr-xtesting/mozbase/mozdebug/mozdebug/mozdebug.py291
-rw-r--r--testing/mozbase/mozdebug/setup.py27
-rw-r--r--testing/mozbase/mozdevice/adb_tests/test_device_running_adb_as_root.py48
-rw-r--r--testing/mozbase/mozdevice/adb_tests/test_devicemanagerADB.py219
-rwxr-xr-xtesting/mozbase/mozdevice/mozdevice/Zeroconf.py1560
-rw-r--r--testing/mozbase/mozdevice/mozdevice/__init__.py15
-rw-r--r--testing/mozbase/mozdevice/mozdevice/adb.py2271
-rw-r--r--testing/mozbase/mozdevice/mozdevice/adb_android.py493
-rw-r--r--testing/mozbase/mozdevice/mozdevice/adb_b2g.py122
-rw-r--r--testing/mozbase/mozdevice/mozdevice/devicemanager.py674
-rw-r--r--testing/mozbase/mozdevice/mozdevice/devicemanagerADB.py893
-rw-r--r--testing/mozbase/mozdevice/mozdevice/devicemanagerSUT.py975
-rw-r--r--testing/mozbase/mozdevice/mozdevice/dmcli.py382
-rw-r--r--testing/mozbase/mozdevice/mozdevice/droid.py263
-rw-r--r--testing/mozbase/mozdevice/mozdevice/sutini.py126
-rw-r--r--testing/mozbase/mozdevice/mozdevice/version_codes.py61
-rw-r--r--testing/mozbase/mozdevice/setup.py36
-rw-r--r--testing/mozbase/mozdevice/sut_tests/README.md15
-rw-r--r--testing/mozbase/mozdevice/sut_tests/dmunit.py55
-rw-r--r--testing/mozbase/mozdevice/sut_tests/genfiles.py85
-rw-r--r--testing/mozbase/mozdevice/sut_tests/runtests.py96
-rwxr-xr-xtesting/mozbase/mozdevice/sut_tests/setup-tools.sh10
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test-files/mytext.txt177
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test-files/smalltext.txt1
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test-files/test_script.sh1
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_datachannel.py53
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_exec.py24
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_exec_env.py32
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_fileExists.py37
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_getdir.py51
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_info.py20
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_prompt.py30
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_ps.py27
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_pull.py34
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_push1.py38
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_push2.py39
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_pushbinary.py19
-rw-r--r--testing/mozbase/mozdevice/sut_tests/test_pushsmalltext.py19
-rw-r--r--testing/mozbase/mozdevice/tests/droidsut_launch.py36
-rw-r--r--testing/mozbase/mozdevice/tests/manifest.ini23
-rw-r--r--testing/mozbase/mozdevice/tests/sut.py89
-rw-r--r--testing/mozbase/mozdevice/tests/sut_app.py20
-rw-r--r--testing/mozbase/mozdevice/tests/sut_basic.py73
-rw-r--r--testing/mozbase/mozdevice/tests/sut_chmod.py22
-rw-r--r--testing/mozbase/mozdevice/tests/sut_copytree.py67
-rw-r--r--testing/mozbase/mozdevice/tests/sut_fileExists.py29
-rw-r--r--testing/mozbase/mozdevice/tests/sut_fileMethods.py72
-rw-r--r--testing/mozbase/mozdevice/tests/sut_info.py49
-rw-r--r--testing/mozbase/mozdevice/tests/sut_ip.py37
-rw-r--r--testing/mozbase/mozdevice/tests/sut_kill.py24
-rw-r--r--testing/mozbase/mozdevice/tests/sut_list.py22
-rw-r--r--testing/mozbase/mozdevice/tests/sut_logcat.py52
-rw-r--r--testing/mozbase/mozdevice/tests/sut_mkdir.py78
-rw-r--r--testing/mozbase/mozdevice/tests/sut_movetree.py65
-rw-r--r--testing/mozbase/mozdevice/tests/sut_ps.py50
-rw-r--r--testing/mozbase/mozdevice/tests/sut_pull.py47
-rw-r--r--testing/mozbase/mozdevice/tests/sut_push.py88
-rw-r--r--testing/mozbase/mozdevice/tests/sut_remove.py24
-rw-r--r--testing/mozbase/mozdevice/tests/sut_time.py18
-rw-r--r--testing/mozbase/mozdevice/tests/sut_unpackfile.py23
-rw-r--r--testing/mozbase/mozfile/mozfile/__init__.py8
-rw-r--r--testing/mozbase/mozfile/mozfile/mozfile.py449
-rw-r--r--testing/mozbase/mozfile/setup.py25
-rw-r--r--testing/mozbase/mozfile/tests/files/missing_file_attributes.zipbin0 -> 442 bytes
-rw-r--r--testing/mozbase/mozfile/tests/manifest.ini6
-rw-r--r--testing/mozbase/mozfile/tests/stubs.py37
-rw-r--r--testing/mozbase/mozfile/tests/test_extract.py154
-rwxr-xr-xtesting/mozbase/mozfile/tests/test_load.py62
-rw-r--r--testing/mozbase/mozfile/tests/test_move_remove.py232
-rw-r--r--testing/mozbase/mozfile/tests/test_tempdir.py42
-rw-r--r--testing/mozbase/mozfile/tests/test_tempfile.py102
-rwxr-xr-xtesting/mozbase/mozfile/tests/test_url.py21
-rw-r--r--testing/mozbase/mozhttpd/mozhttpd/__init__.py48
-rw-r--r--testing/mozbase/mozhttpd/mozhttpd/handlers.py16
-rwxr-xr-xtesting/mozbase/mozhttpd/mozhttpd/mozhttpd.py330
-rw-r--r--testing/mozbase/mozhttpd/setup.py29
-rw-r--r--testing/mozbase/mozhttpd/tests/api.py266
-rw-r--r--testing/mozbase/mozhttpd/tests/baseurl.py19
-rw-r--r--testing/mozbase/mozhttpd/tests/basic.py46
-rw-r--r--testing/mozbase/mozhttpd/tests/filelisting.py43
-rw-r--r--testing/mozbase/mozhttpd/tests/manifest.ini6
-rw-r--r--testing/mozbase/mozhttpd/tests/paths.py77
-rw-r--r--testing/mozbase/mozhttpd/tests/requestlog.py41
-rw-r--r--testing/mozbase/mozinfo/mozinfo/__init__.py60
-rwxr-xr-xtesting/mozbase/mozinfo/mozinfo/mozinfo.py300
-rw-r--r--testing/mozbase/mozinfo/mozinfo/string_version.py43
-rw-r--r--testing/mozbase/mozinfo/setup.py31
-rw-r--r--testing/mozbase/mozinfo/tests/manifest.ini1
-rw-r--r--testing/mozbase/mozinfo/tests/test.py121
-rw-r--r--testing/mozbase/mozinstall/mozinstall/__init__.py6
-rwxr-xr-xtesting/mozbase/mozinstall/mozinstall/mozinstall.py342
-rw-r--r--testing/mozbase/mozinstall/setup.py53
-rw-r--r--testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.dmgbin0 -> 13441 bytes
-rw-r--r--testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.tar.bz2bin0 -> 2882 bytes
-rw-r--r--testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.zipbin0 -> 8707 bytes
-rw-r--r--testing/mozbase/mozinstall/tests/manifest.ini1
-rw-r--r--testing/mozbase/mozinstall/tests/test.py169
-rw-r--r--testing/mozbase/mozleak/mozleak/__init__.py11
-rw-r--r--testing/mozbase/mozleak/mozleak/leaklog.py205
-rw-r--r--testing/mozbase/mozleak/setup.py26
-rw-r--r--testing/mozbase/mozlog/mozlog/__init__.py30
-rw-r--r--testing/mozbase/mozlog/mozlog/commandline.py282
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/__init__.py23
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/base.py20
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/errorsummary.py69
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/html/__init__.py3
-rwxr-xr-xtesting/mozbase/mozlog/mozlog/formatters/html/html.py236
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/html/main.js172
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/html/style.css154
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py283
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/machformatter.py395
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/process.py55
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/tbplformatter.py244
-rwxr-xr-xtesting/mozbase/mozlog/mozlog/formatters/unittest.py60
-rw-r--r--testing/mozbase/mozlog/mozlog/formatters/xunit.py101
-rw-r--r--testing/mozbase/mozlog/mozlog/handlers/__init__.py11
-rw-r--r--testing/mozbase/mozlog/mozlog/handlers/base.py105
-rw-r--r--testing/mozbase/mozlog/mozlog/handlers/bufferhandler.py83
-rw-r--r--testing/mozbase/mozlog/mozlog/handlers/statushandler.py52
-rw-r--r--testing/mozbase/mozlog/mozlog/handlers/valgrindhandler.py140
-rw-r--r--testing/mozbase/mozlog/mozlog/logtypes.py204
-rw-r--r--testing/mozbase/mozlog/mozlog/proxy.py35
-rw-r--r--testing/mozbase/mozlog/mozlog/pytest_mozlog/__init__.py0
-rw-r--r--testing/mozbase/mozlog/mozlog/pytest_mozlog/plugin.py94
-rw-r--r--testing/mozbase/mozlog/mozlog/reader.py77
-rw-r--r--testing/mozbase/mozlog/mozlog/scripts/__init__.py32
-rw-r--r--testing/mozbase/mozlog/mozlog/scripts/format.py42
-rw-r--r--testing/mozbase/mozlog/mozlog/scripts/logmerge.py82
-rw-r--r--testing/mozbase/mozlog/mozlog/scripts/unstable.py120
-rw-r--r--testing/mozbase/mozlog/mozlog/stdadapter.py45
-rw-r--r--testing/mozbase/mozlog/mozlog/structuredlog.py521
-rw-r--r--testing/mozbase/mozlog/mozlog/unstructured/__init__.py8
-rw-r--r--testing/mozbase/mozlog/mozlog/unstructured/logger.py185
-rw-r--r--testing/mozbase/mozlog/mozlog/unstructured/loggingmixin.py44
-rw-r--r--testing/mozbase/mozlog/mozlog/unstructured/loglistener.py50
-rw-r--r--testing/mozbase/mozlog/setup.py39
-rw-r--r--testing/mozbase/mozlog/tests/manifest.ini2
-rw-r--r--testing/mozbase/mozlog/tests/test_logger.py264
-rw-r--r--testing/mozbase/mozlog/tests/test_structured.py1098
-rw-r--r--testing/mozbase/moznetwork/moznetwork/__init__.py26
-rw-r--r--testing/mozbase/moznetwork/moznetwork/moznetwork.py172
-rw-r--r--testing/mozbase/moznetwork/setup.py29
-rw-r--r--testing/mozbase/moznetwork/tests/manifest.ini1
-rw-r--r--testing/mozbase/moznetwork/tests/test.py85
-rw-r--r--testing/mozbase/mozprocess/mozprocess/__init__.py8
-rw-r--r--testing/mozbase/mozprocess/mozprocess/processhandler.py1079
-rw-r--r--testing/mozbase/mozprocess/mozprocess/qijo.py166
-rw-r--r--testing/mozbase/mozprocess/mozprocess/winprocess.py479
-rw-r--r--testing/mozbase/mozprocess/setup.py33
-rw-r--r--testing/mozbase/mozprocess/tests/Makefile55
-rw-r--r--testing/mozbase/mozprocess/tests/infinite_loop.py18
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/AUTHORS6
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/INSTALL15
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/LICENSE21
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/Makefile85
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/README12
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/dictionary.c407
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/dictionary.h176
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/iniparser.c648
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/iniparser.h273
-rw-r--r--testing/mozbase/mozprocess/tests/iniparser/platform.mk8
-rw-r--r--testing/mozbase/mozprocess/tests/manifest.ini18
-rw-r--r--testing/mozbase/mozprocess/tests/proccountfive.py2
-rw-r--r--testing/mozbase/mozprocess/tests/process_normal_broad_python.ini30
-rw-r--r--testing/mozbase/mozprocess/tests/process_normal_deep_python.ini65
-rw-r--r--testing/mozbase/mozprocess/tests/process_normal_finish.ini11
-rw-r--r--testing/mozbase/mozprocess/tests/process_normal_finish_no_process_group.ini2
-rw-r--r--testing/mozbase/mozprocess/tests/process_normal_finish_python.ini17
-rw-r--r--testing/mozbase/mozprocess/tests/process_waittimeout.ini11
-rw-r--r--testing/mozbase/mozprocess/tests/process_waittimeout_10s.ini8
-rw-r--r--testing/mozbase/mozprocess/tests/process_waittimeout_10s_python.ini16
-rw-r--r--testing/mozbase/mozprocess/tests/process_waittimeout_python.ini16
-rw-r--r--testing/mozbase/mozprocess/tests/proclaunch.c156
-rw-r--r--testing/mozbase/mozprocess/tests/proclaunch.py199
-rw-r--r--testing/mozbase/mozprocess/tests/procnonewline.py3
-rw-r--r--testing/mozbase/mozprocess/tests/proctest.py52
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess.py235
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_kill.py91
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_kill_broad_wait.py33
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_misc.py41
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_output.py57
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_params.py84
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_poll.py106
-rw-r--r--testing/mozbase/mozprocess/tests/test_mozprocess_wait.py96
-rw-r--r--testing/mozbase/mozprocess/tests/test_process_reader.py101
-rw-r--r--testing/mozbase/mozprofile/mozprofile/__init__.py21
-rw-r--r--testing/mozbase/mozprofile/mozprofile/addons.py410
-rwxr-xr-xtesting/mozbase/mozprofile/mozprofile/cli.py131
-rw-r--r--testing/mozbase/mozprofile/mozprofile/diff.py81
-rw-r--r--testing/mozbase/mozprofile/mozprofile/permissions.py415
-rw-r--r--testing/mozbase/mozprofile/mozprofile/prefs.py232
-rw-r--r--testing/mozbase/mozprofile/mozprofile/profile.py454
-rw-r--r--testing/mozbase/mozprofile/mozprofile/view.py43
-rw-r--r--testing/mozbase/mozprofile/mozprofile/webapps.py281
-rw-r--r--testing/mozbase/mozprofile/setup.py45
-rw-r--r--testing/mozbase/mozprofile/tests/addon_stubs.py78
-rwxr-xr-xtesting/mozbase/mozprofile/tests/addonid.py184
-rw-r--r--testing/mozbase/mozprofile/tests/addons/empty.xpibin0 -> 530 bytes
-rw-r--r--testing/mozbase/mozprofile/tests/addons/empty/install.rdf20
-rw-r--r--testing/mozbase/mozprofile/tests/addons/invalid.xpibin0 -> 564 bytes
-rwxr-xr-xtesting/mozbase/mozprofile/tests/bug758250.py53
-rwxr-xr-xtesting/mozbase/mozprofile/tests/bug785146.py51
-rw-r--r--testing/mozbase/mozprofile/tests/files/not_an_addon.txt0
-rw-r--r--testing/mozbase/mozprofile/tests/files/prefs_with_comments.js6
-rw-r--r--testing/mozbase/mozprofile/tests/files/prefs_with_interpolation.js4
-rw-r--r--testing/mozbase/mozprofile/tests/files/webapps1.json50
-rw-r--r--testing/mozbase/mozprofile/tests/files/webapps2.json37
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_1.rdf21
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_2.rdf21
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_3.rdf22
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_4.rdf22
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_no_id.rdf22
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_not_wellformed.rdf23
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_version.rdf23
-rw-r--r--testing/mozbase/mozprofile/tests/install_manifests/test_addon_unpack.rdf22
-rw-r--r--testing/mozbase/mozprofile/tests/manifest.ini12
-rwxr-xr-xtesting/mozbase/mozprofile/tests/permissions.py199
-rw-r--r--testing/mozbase/mozprofile/tests/server_locations.py151
-rw-r--r--testing/mozbase/mozprofile/tests/test_addons.py415
-rw-r--r--testing/mozbase/mozprofile/tests/test_clone_cleanup.py63
-rwxr-xr-xtesting/mozbase/mozprofile/tests/test_nonce.py49
-rwxr-xr-xtesting/mozbase/mozprofile/tests/test_preferences.py378
-rw-r--r--testing/mozbase/mozprofile/tests/test_profile.py30
-rw-r--r--testing/mozbase/mozprofile/tests/test_profile_view.py75
-rwxr-xr-xtesting/mozbase/mozprofile/tests/test_webapps.py202
-rw-r--r--testing/mozbase/mozrunner/mozrunner/__init__.py11
-rw-r--r--testing/mozbase/mozrunner/mozrunner/application.py265
-rw-r--r--testing/mozbase/mozrunner/mozrunner/base/__init__.py5
-rw-r--r--testing/mozbase/mozrunner/mozrunner/base/browser.py80
-rw-r--r--testing/mozbase/mozrunner/mozrunner/base/device.py185
-rw-r--r--testing/mozbase/mozrunner/mozrunner/base/runner.py233
-rw-r--r--testing/mozbase/mozrunner/mozrunner/cli.py152
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/__init__.py13
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/android_device.py773
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/autophone.py651
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/base.py306
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/emulator.py288
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/emulator_battery.py53
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/emulator_geo.py17
-rw-r--r--testing/mozbase/mozrunner/mozrunner/devices/emulator_screen.py89
-rw-r--r--testing/mozbase/mozrunner/mozrunner/errors.py16
-rw-r--r--testing/mozbase/mozrunner/mozrunner/resources/metrotestharness.exebin0 -> 63488 bytes
-rw-r--r--testing/mozbase/mozrunner/mozrunner/runners.py211
-rwxr-xr-xtesting/mozbase/mozrunner/mozrunner/utils.py279
-rw-r--r--testing/mozbase/mozrunner/setup.py54
-rw-r--r--testing/mozbase/mozrunner/tests/manifest.ini7
-rw-r--r--testing/mozbase/mozrunner/tests/mozrunnertest.py34
-rw-r--r--testing/mozbase/mozrunner/tests/test_crash.py37
-rw-r--r--testing/mozbase/mozrunner/tests/test_interactive.py53
-rw-r--r--testing/mozbase/mozrunner/tests/test_start.py45
-rw-r--r--testing/mozbase/mozrunner/tests/test_states.py18
-rw-r--r--testing/mozbase/mozrunner/tests/test_stop.py39
-rw-r--r--testing/mozbase/mozrunner/tests/test_threads.py73
-rw-r--r--testing/mozbase/mozrunner/tests/test_wait.py29
-rw-r--r--testing/mozbase/mozscreenshot/mozscreenshot/__init__.py61
-rw-r--r--testing/mozbase/mozscreenshot/setup.py26
-rw-r--r--testing/mozbase/mozsystemmonitor/README.rst13
-rw-r--r--testing/mozbase/mozsystemmonitor/mozsystemmonitor/__init__.py0
-rw-r--r--testing/mozbase/mozsystemmonitor/mozsystemmonitor/resourcemonitor.py676
-rw-r--r--testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/__init__.py0
-rw-r--r--testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/test_resource_monitor.py180
-rw-r--r--testing/mozbase/mozsystemmonitor/setup.py29
-rw-r--r--testing/mozbase/moztest/moztest/__init__.py7
-rw-r--r--testing/mozbase/moztest/moztest/adapters/__init__.py7
-rw-r--r--testing/mozbase/moztest/moztest/adapters/unit.py225
-rw-r--r--testing/mozbase/moztest/moztest/output/__init__.py0
-rw-r--r--testing/mozbase/moztest/moztest/output/autolog.py73
-rw-r--r--testing/mozbase/moztest/moztest/output/base.py53
-rw-r--r--testing/mozbase/moztest/moztest/output/xunit.py93
-rw-r--r--testing/mozbase/moztest/moztest/results.py323
-rw-r--r--testing/mozbase/moztest/setup.py26
-rw-r--r--testing/mozbase/moztest/tests/manifest.ini1
-rw-r--r--testing/mozbase/moztest/tests/test.py55
-rw-r--r--testing/mozbase/mozversion/mozversion/__init__.py7
-rw-r--r--testing/mozbase/mozversion/mozversion/errors.py30
-rw-r--r--testing/mozbase/mozversion/mozversion/mozversion.py340
-rw-r--r--testing/mozbase/mozversion/setup.py29
-rw-r--r--testing/mozbase/mozversion/tests/manifest.ini4
-rw-r--r--testing/mozbase/mozversion/tests/test_apk.py43
-rw-r--r--testing/mozbase/mozversion/tests/test_b2g.py75
-rw-r--r--testing/mozbase/mozversion/tests/test_binary.py177
-rw-r--r--testing/mozbase/mozversion/tests/test_sources.py85
-rw-r--r--testing/mozbase/packages.txt19
-rwxr-xr-xtesting/mozbase/setup_development.py273
-rw-r--r--testing/mozbase/test-manifest.ini24
-rwxr-xr-xtesting/mozbase/test.py104
-rwxr-xr-xtesting/mozbase/versioninfo.py132
377 files changed, 43767 insertions, 0 deletions
diff --git a/testing/mozbase/README.md b/testing/mozbase/README.md
new file mode 100644
index 000000000..4e2cabfe5
--- /dev/null
+++ b/testing/mozbase/README.md
@@ -0,0 +1,21 @@
+# Mozbase
+
+Mozbase is a set of easy-to-use Python packages forming a supplemental standard
+library for Mozilla. It provides consistency and reduces redundancy in
+automation and other system-level software. All of Mozilla's test harnesses use
+mozbase to some degree, including Talos, mochitest, reftest, Autophone, and
+Eideticker.
+
+Learn more about mozbase at the [project page][].
+
+Read [detailed docs][] online, or build them locally by running "make html" in
+the docs directory.
+
+Consult [open][] [bugs][] and feel free to file [new bugs][].
+
+
+[project page]: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase
+[detailed docs]: http://mozbase.readthedocs.org/
+[open]: https://bugzilla.mozilla.org/buglist.cgi?resolution=---&component=Mozbase&product=Testing
+[bugs]: https://bugzilla.mozilla.org/buglist.cgi?resolution=---&status_whiteboard_type=allwordssubstr&query_format=advanced&status_whiteboard=mozbase
+[new bugs]: https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=Mozbase
diff --git a/testing/mozbase/docs/Makefile b/testing/mozbase/docs/Makefile
new file mode 100644
index 000000000..386a52db1
--- /dev/null
+++ b/testing/mozbase/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MozBase.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MozBase.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/MozBase"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MozBase"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/mozbase/docs/_static/structured_example.py b/testing/mozbase/docs/_static/structured_example.py
new file mode 100644
index 000000000..2bbc03810
--- /dev/null
+++ b/testing/mozbase/docs/_static/structured_example.py
@@ -0,0 +1,102 @@
+import argparse
+import sys
+import traceback
+import types
+
+from mozlog import commandline, get_default_logger
+
+
+class TestAssertion(Exception):
+    pass
+
+
+def assert_equals(a, b):
+    if a != b:
+        raise TestAssertion("%r not equal to %r" % (a, b))
+
+
+def expected(status):
+    def inner(f):
+        def test_func():
+            f()
+        test_func.__name__ = f.__name__
+        test_func._expected = status
+        return test_func
+    return inner
+
+
+def test_that_passes():
+    assert_equals(1, int("1"))
+
+
+def test_that_fails():
+    assert_equals(1, int("2"))
+
+
+def test_that_has_an_error():
+    assert_equals(2, 1 + "1")
+
+
+@expected("FAIL")
+def test_expected_fail():
+    assert_equals(2 + 2, 5)
+
+
+class TestRunner(object):
+
+    def __init__(self):
+        self.logger = get_default_logger(component='TestRunner')
+
+    def gather_tests(self):
+        for item in globals().itervalues():
+            if isinstance(item, types.FunctionType) and item.__name__.startswith("test_"):
+                yield item.__name__, item
+
+    def run(self):
+        tests = list(self.gather_tests())
+
+        self.logger.suite_start(tests=[name for name, func in tests])
+        self.logger.info("Running tests")
+        for name, func in tests:
+            self.run_test(name, func)
+        self.logger.suite_end()
+
+    def run_test(self, name, func):
+        self.logger.test_start(name)
+        status = None
+        message = None
+        expected = func._expected if hasattr(func, "_expected") else "PASS"
+        try:
+            func()
+        except TestAssertion as e:
+            status = "FAIL"
+            message = e.message
+        except:
+            status = "ERROR"
+            message = traceback.format_exc()
+        else:
+            status = "PASS"
+        self.logger.test_end(name, status=status, expected=expected, message=message)
+
+
+def get_parser():
+    parser = argparse.ArgumentParser()
+    return parser
+
+
+def main():
+    parser = get_parser()
+    commandline.add_logging_group(parser)
+
+    args = parser.parse_args()
+
+    logger = commandline.setup_logging("structured-example", args, {"raw": sys.stdout})
+
+    runner = TestRunner()
+    try:
+        runner.run()
+    except:
+        logger.critical("Error during test run:\n%s" % traceback.format_exc())
+
+if __name__ == "__main__":
+    main()
diff --git a/testing/mozbase/docs/conf.py b/testing/mozbase/docs/conf.py
new file mode 100644
index 000000000..95d6de64b
--- /dev/null
+++ b/testing/mozbase/docs/conf.py
@@ -0,0 +1,258 @@
+# -*- coding: utf-8 -*-
+#
+# MozBase documentation build configuration file, created by
+# sphinx-quickstart on Mon Oct 22 14:02:17 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+here = os.path.dirname(os.path.abspath(__file__))
+parent = os.path.dirname(here)
+for item in os.listdir(parent):
+    path = os.path.join(parent, item)
+    if (not os.path.isdir(path)) or (not os.path.exists(os.path.join(path, 'setup.py'))):
+        continue
+    sys.path.insert(0, path)
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'MozBase'
+copyright = u'2012, Mozilla Automation and Tools team'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '1'
+# The full version, including alpha/beta/rc tags.
+release = '1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+if not on_rtd:
+    try:
+        import sphinx_rtd_theme
+        html_theme = 'sphinx_rtd_theme'
+        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+    except ImportError:
+        pass
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+# html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+html_title = "mozbase documentation"
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'MozBasedoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ # 'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'MozBase.tex', u'MozBase Documentation',
+ u'Mozilla Automation and Tools team', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'mozbase', u'MozBase Documentation',
+ [u'Mozilla Automation and Tools team'], 1)
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'MozBase', u'MozBase Documentation',
+ u'Mozilla Automation and Tools team', 'MozBase', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
diff --git a/testing/mozbase/docs/devicemanagement.rst b/testing/mozbase/docs/devicemanagement.rst
new file mode 100644
index 000000000..80c4af5bd
--- /dev/null
+++ b/testing/mozbase/docs/devicemanagement.rst
@@ -0,0 +1,11 @@
+Device management
+-----------------
+
+Mozbase provides a module called `mozdevice` for the purposes of
+running automated tests or scripts on a device (e.g. an Android- or
+FirefoxOS-based phone) connected to a workstation.
+
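+For instance, a minimal sketch using the older ``DeviceManagerADB`` API
+documented below, assuming a single Android device is attached over adb:
+
+.. code-block:: python
+
+    from mozdevice import DeviceManagerADB
+
+    # connect to the first device reported by adb
+    dm = DeviceManagerADB()
+
+    # run a command on the device and capture its output
+    print(dm.shellCheckOutput(["ls", "/sdcard"]))
+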
+.. toctree::
+ :maxdepth: 3
+
+ mozdevice
diff --git a/testing/mozbase/docs/gettinginfo.rst b/testing/mozbase/docs/gettinginfo.rst
new file mode 100644
index 000000000..35c4c4508
--- /dev/null
+++ b/testing/mozbase/docs/gettinginfo.rst
@@ -0,0 +1,13 @@
+Getting information on the system under test
+============================================
+
+It's often necessary to get some information about the system we're
+testing, for example to turn some platform-specific behaviour on or
+off.
+
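+For example, a minimal sketch using ``mozinfo`` (documented below); the
+exact keys available vary by platform and build:
+
+.. code-block:: python
+
+    import mozinfo
+
+    # mozinfo.info is a dict of facts about the system running the tests
+    print(mozinfo.info["os"], mozinfo.info["bits"])
+
+    # convenience booleans are also provided
+    if mozinfo.isWin:
+        print("running on Windows")
+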
+.. toctree::
+ :maxdepth: 2
+
+ mozinfo
+ moznetwork
+ mozversion
diff --git a/testing/mozbase/docs/index.rst b/testing/mozbase/docs/index.rst
new file mode 100644
index 000000000..86da2a14b
--- /dev/null
+++ b/testing/mozbase/docs/index.rst
@@ -0,0 +1,57 @@
+.. MozBase documentation master file, created by
+ sphinx-quickstart on Mon Oct 22 14:02:17 2012.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+mozbase
+=======
+
+Mozbase is a set of easy-to-use Python packages forming a supplemental standard
+library for Mozilla. It provides consistency and reduces redundancy in
+automation and other system-level software. All of Mozilla's test harnesses use
+mozbase to some degree, including Talos_, mochitest_, reftest_, Autophone_, and
+Eideticker_.
+
+.. _Talos: https://wiki.mozilla.org/Talos
+
+.. _mochitest: https://developer.mozilla.org/en-US/docs/Mochitest
+
+.. _reftest: https://developer.mozilla.org/en-US/docs/Creating_reftest-based_unit_tests
+
+.. _Autophone: https://wiki.mozilla.org/Auto-tools/Projects/AutoPhone
+
+.. _Eideticker: https://wiki.mozilla.org/Project_Eideticker
+
+In the course of writing automated tests at Mozilla, we found that
+the same tasks came up over and over, regardless of the specific nature of
+what we were testing. We figured that consolidating this code into a set of
+libraries would save us a good deal of time, and so we spent some effort
+factoring out the best-of-breed automation code into something we named
+"mozbase" (usually written all in lower case except at the beginning of a
+sentence).
+
+This is the main documentation for users of mozbase. There is also a
+project_ wiki page with notes on development practices and administration.
+
+.. _project: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase
+
+The documentation is organized by category, then by module. Figure out what you
+want to do then dive in!
+
+.. toctree::
+ :maxdepth: 2
+
+ manifestparser
+ gettinginfo
+ setuprunning
+ mozhttpd
+ loggingreporting
+ devicemanagement
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/testing/mozbase/docs/loggingreporting.rst b/testing/mozbase/docs/loggingreporting.rst
new file mode 100644
index 000000000..a8561a49b
--- /dev/null
+++ b/testing/mozbase/docs/loggingreporting.rst
@@ -0,0 +1,11 @@
+Logging and reporting
+=====================
+
+Ideally, output from different types of testing systems should be as
+uniform as possible, and it should be easy to make that output more or
+less verbose. We created some libraries to make this easy.
+
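+For example, a minimal sketch that emits structured log messages with
+``mozlog`` (documented below), formatted for human consumption:
+
+.. code-block:: python
+
+    import sys
+
+    from mozlog.structuredlog import StructuredLogger
+    from mozlog.handlers import StreamHandler
+    from mozlog.formatters import MachFormatter
+
+    # route all log messages to stdout in a human-readable format
+    logger = StructuredLogger("example-suite")
+    logger.add_handler(StreamHandler(sys.stdout, MachFormatter()))
+
+    logger.suite_start(tests=["test_example"])
+    logger.test_start("test_example")
+    logger.test_end("test_example", status="PASS", expected="PASS")
+    logger.suite_end()
+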
+.. toctree::
+ :maxdepth: 2
+
+ mozlog
diff --git a/testing/mozbase/docs/make.bat b/testing/mozbase/docs/make.bat
new file mode 100644
index 000000000..d67c86ae9
--- /dev/null
+++ b/testing/mozbase/docs/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\MozBase.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\MozBase.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/testing/mozbase/docs/manifestparser.rst b/testing/mozbase/docs/manifestparser.rst
new file mode 100644
index 000000000..e93317b40
--- /dev/null
+++ b/testing/mozbase/docs/manifestparser.rst
@@ -0,0 +1,558 @@
+Managing lists of tests
+=======================
+
+.. py:currentmodule:: manifestparser
+
+We don't always want to run all tests, all the time. Sometimes a test
+may be broken; in other cases we only want to run a test on a specific
+platform or build of Mozilla. To handle these cases (and more), we
+created a Python library to create and use test "manifests", which
+codify this information.
+
+:mod:`manifestparser` --- Create and manage test manifests
+-----------------------------------------------------------
+
+manifestparser lets you easily create and use test manifests, to
+control which tests are run under what circumstances.
+
+What manifestparser gives you:
+
+* manifests are ordered lists of tests
+* tests may have an arbitrary number of key, value pairs
+* the parser returns an ordered list of test data structures, which
+  are just dicts with some keys. For example, a test with no
+  user-specified metadata looks like this:
+
+.. code-block:: text
+
+    [{'expected': 'pass',
+      'path': '/home/mozilla/mozmill/src/manifestparser/manifestparser/tests/testToolbar/testBackForwardButtons.js',
+      'relpath': 'testToolbar/testBackForwardButtons.js',
+      'name': 'testBackForwardButtons.js',
+      'here': '/home/mozilla/mozmill/src/manifestparser/manifestparser/tests',
+      'manifest': '/home/mozilla/mozmill/src/manifestparser/manifestparser/tests/manifest.ini',}]
+
+The keys displayed here (path, relpath, name, here, and manifest) are
+reserved keys for manifestparser and any consuming APIs. You can add
+additional key, value metadata to each test.
+
+Why have test manifests?
+````````````````````````
+
+It is desirable to have a unified format for test manifests for testing
+`mozilla-central <http://hg.mozilla.org/mozilla-central>`_ and other trees:
+
+* It is desirable to be able to selectively enable or disable tests based on platform or other conditions. This should be easy to do. Currently, since many of the harnesses just crawl directories, there is no effective way of disabling a test except for removing it from mozilla-central.
+* It is desirable to do this in a universal way so that enabling and disabling tests, as well as other tasks, are easily accessible to a wider audience than just those intimately familiar with the specific test framework.
+* It is desirable to have other metadata on top of the test. For instance, let's say a test is marked as skipped. It would be nice to give the reason why.
+
+
+Most Mozilla test harnesses work by crawling a directory structure.
+While this is straightforward, manifests offer several practical
+advantages:
+
+* ability to turn a test off easily: if a test is broken on m-c,
+  currently the only way to turn it off is, generally speaking, to
+  remove the test. This is often undesirable: when a test has to be
+  set aside because other people want to land and it can't be
+  investigated in real time (is it a failure? is the test bad? is no
+  one around who knows the test?), backing out the test is at best
+  problematic. With a manifest, a test may be disabled without
+  removing it from the tree, and a bug filed with the appropriate
+  reason:
+
+.. code-block:: text
+
+ [test_broken.js]
+ disabled = https://bugzilla.mozilla.org/show_bug.cgi?id=123456
+
+* ability to run different (subsets of) tests on different
+  platforms. Traditionally, we've done a bit of magic or had the test
+  know what platform it would or would not run on. With manifests, you
+  can mark what platforms a test will or will not run on and change
+  these without changing the test.
+
+.. code-block:: text
+
+ [test_works_on_windows_only.js]
+ skip-if = os != 'win'
+
+* ability to mark up tests with metadata. We have a large, complicated,
+  and always changing infrastructure. key, value metadata may be used
+  as an annotation to a test and appropriately curated and mined. For
+  instance, we could mark certain tests as random-orange with a bug
+  number, if it were desirable.
+
+* ability to have sane and well-defined test runs. You can keep
+  different manifests for different test runs and ``[include:]``
+  (sub)manifests as appropriate to your needs.
+
+Manifest Format
+````````````````
+
+Manifests are .ini files, with section names denoting the path
+relative to the manifest:
+
+.. code-block:: text
+
+ [foo.js]
+ [bar.js]
+ [fleem.js]
+
+The sections are read in order. In addition, tests may include
+arbitrary key, value metadata to be used by the harness. You may also
+have a `[DEFAULT]` section that will give key, value pairs that will
+be inherited by each test unless overridden:
+
+.. code-block:: text
+
+ [DEFAULT]
+ type = restart
+
+ [lilies.js]
+ color = white
+
+ [daffodils.js]
+ color = yellow
+ type = other
+ # override type from DEFAULT
+
+ [roses.js]
+ color = red
+
+You can also include other manifests:
+
+.. code-block:: text
+
+ [include:subdir/anothermanifest.ini]
+
+You can also reference parent manifests to inherit keys and values from their
+DEFAULT section, without adding any tests they may include:
+
+.. code-block:: text
+
+ [parent:../manifest.ini]
+
+Manifests referenced by an `[include:]` directive are resolved relative to
+the directory of the including manifest, unless an absolute path is given.
+
+By default you can use both '#' and ';' as comment characters. Comments
+must start on a new line; inline comments are not supported.
+
+.. code-block:: text
+
+ [roses.js]
+ # a valid comment
+ ; another valid comment
+ color = red # not a valid comment
+
+In the example above, the 'color' property will have the value 'red #
+not a valid comment'.
+
+Special variable server-root
+````````````````````````````
+There is a special variable called `server-root` used for paths on the system.
+This variable is treated as a path and will be expanded into its absolute form.
+
+Because key/value pairs are inherited, a system path must be absolute
+for it to be of any use in any included file.
+
+.. code-block:: text
+
+ [DEFAULT]
+ server-root = ../data
+
+ [test1.js]
+ server-root = test1/data
+
+Manifest Conditional Expressions
+````````````````````````````````
+The conditional expressions used in manifests are parsed using the *ExpressionParser* class.
+
+.. autoclass:: manifestparser.ExpressionParser
+
+Consumers of this module are expected to pass in a value dictionary
+for evaluating conditional expressions. A common pattern is to pass
+the dictionary from the :mod:`mozinfo` module.
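+
+For example, a harness might construct a `TestManifest` and evaluate its
+conditions against the :mod:`mozinfo` values (a minimal sketch; the manifest
+path is hypothetical):
+
+.. code-block:: python
+
+    from manifestparser import TestManifest
+    import mozinfo
+
+    manifest = TestManifest(manifests=('manifest.ini',), strict=False)
+
+    # mozinfo.info supplies keys such as 'os', 'bits' and 'processor'
+    # that skip-if/run-if expressions can refer to
+    tests = manifest.active_tests(**mozinfo.info)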
+
+Data
+````
+
+manifestparser (formerly "Manifest Destiny") gives tests as a list of
+dictionaries (in Python terms).
+
+* path: full path to the test
+* relpath: relative path starting from the root directory. The root directory
+ is typically the location of the root manifest, or the source
+ repository. It can be specified at runtime by passing in `rootdir`
+ to `TestManifest`. Defaults to the directory containing the test's
+ ancestor manifest.
+* name: file name of the test
+* here: the parent directory of the manifest
+* manifest: the path to the manifest containing the test
+
+This data corresponds to a one-line manifest:
+
+.. code-block:: text
+
+ [testToolbar/testBackForwardButtons.js]
+
+If additional key, values were specified, they would be in this dict
+as well.
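+
+As a rough sketch of consuming this data directly (the manifest name is
+hypothetical), the parsed dictionaries can simply be iterated:
+
+.. code-block:: python
+
+    from manifestparser import ManifestParser
+
+    parser = ManifestParser(manifests=('manifest.ini',))
+    for test in parser.tests:
+        # reserved keys plus any custom key, value metadata
+        print test['name'], test.get('disabled', '')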
+
+Outside of the reserved keys, the remaining key, value pairs are
+used by convention. There is a (currently very minimal)
+generic integration layer in manifestparser for use by all harnesses,
+`manifestparser.TestManifest`.
+For instance, if the 'disabled' key is present, you can get the set of
+tests without the disabled ones (various other queries are possible as well).
+
+Since the system is convention-based, the harnesses may do whatever
+they want with the data. They may ignore it completely, they may use
+the provided integration layer, or they may provide their own
+integration layer. This should allow whatever sort of logic is
+desired. For instance, if in your test harness you wanted to run a
+certain class of tests only on Mondays:
+
+.. code-block:: python
+
+    import calendar
+    import datetime
+
+    tests = []
+    today = calendar.day_name[datetime.date.today().weekday()].lower()
+    for test in manifest.tests:
+        if 'runOnDay' in test:
+            if today == test['runOnDay'].lower():
+                tests.append(test)
+        else:
+            tests.append(test)
+
+To recap:
+
+* the manifests allow you to specify test data
+* the parser gives you this data
+* you can use it however you want or process it further as you need
+
+Tests are denoted by sections in an .ini file (see
+http://hg.mozilla.org/automation/manifestparser/file/tip/manifestparser/tests/mozmill-example.ini).
+
+Additional manifest files may be included with an `[include:]` directive:
+
+.. code-block:: text
+
+ [include:path-to-additional-file.manifest]
+
+The path to included files is relative to the current manifest.
+
+The `[DEFAULT]` section contains variables that all tests inherit from.
+
+Included files will inherit the top-level variables but may override
+in their own `[DEFAULT]` section.
+
+manifestparser Architecture
+````````````````````````````
+
+There is a two- or three-layered approach to the manifestparser
+architecture, depending on your needs:
+
+1. ManifestParser: this is a generic parser for .ini manifests that
+facilitates the `[include:]` logic and the inheritance of
+metadata. Despite the internal variable being called `self.tests`
+(an oversight), this layer has nothing in particular to do with tests.
+
+2. TestManifest: this is a harness-agnostic integration layer that is
+test-specific. TestManifest facilitates `skip-if` logic.
+
+3. Optionally, a harness will have an integration layer that inherits
+from TestManifest if more harness-specific customization is desired at
+the manifest level.
+
+See the source code at https://github.com/mozilla/mozbase/tree/master/manifestparser
+and
+https://github.com/mozilla/mozbase/blob/master/manifestparser/manifestparser.py
+in particular.
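+
+As a minimal sketch of how the first two layers are used (the manifest
+path is hypothetical):
+
+.. code-block:: python
+
+    from manifestparser import ManifestParser, TestManifest
+
+    # layer 1: generic .ini parsing with [include:] and metadata inheritance
+    parser = ManifestParser(manifests=('manifest.ini',))
+    entries = parser.tests
+
+    # layer 2: test-specific integration (skip-if, disabled, ...)
+    manifest = TestManifest(manifests=('manifest.ini',), strict=False)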
+
+Filtering Manifests
+```````````````````
+
+After creating a `TestManifest` object, all manifest files are read and a list
+of test objects can be accessed via `TestManifest.tests`. However, this list contains
+all test objects, whether they should be run or not. Normally it needs to be
+filtered down to only the set of tests that should be run by the test harness.
+
+To do this, a test harness can call `TestManifest.active_tests`:
+
+.. code-block:: python
+
+ tests = manifest.active_tests(exists=True, disabled=True, **tags)
+
+By default, `active_tests` runs the filters found in
+:attr:`~.DEFAULT_FILTERS`. It also accepts two convenience arguments:
+
+1. `exists`: if True (default), filter out tests that do not exist on the local file system.
+2. `disabled`: if True (default), do not filter out tests containing the 'disabled' key
+ (which can be set by `skip-if` or manually).
+
+This works for simple cases, but there are other built-in filters, and even custom filters,
+that can be applied to the `TestManifest`. To do so, pass them in via the `filters` argument to `active_tests`:
+
+.. code-block:: python
+
+ from manifestparser.filters import subsuite
+ import mozinfo
+
+ filters = [subsuite('devtools')]
+ tests = manifest.active_tests(filters=filters, **mozinfo.info)
+
+.. automodule:: manifestparser.filters
+ :members:
+ :exclude-members: filterlist,InstanceFilter,DEFAULT_FILTERS
+
+.. autodata:: manifestparser.filters.DEFAULT_FILTERS
+ :annotation:
+
+For example, suppose we want to introduce a new key called `timeout-if` that adds a
+'timeout' property to a test if a certain condition is True. The syntax in the manifest
+files will look like this:
+
+.. code-block:: text
+
+ [test_foo.py]
+ timeout-if = 300, os == 'win'
+
+The value is <timeout>, <condition> where condition is the same format as the one in
+`skip-if`. In the above case, if os == 'win', a timeout of 300 seconds will be
+applied. Otherwise, no timeout will be applied. All we need to do is define the filter
+and add it:
+
+.. code-block:: python
+
+ from manifestparser.expression import parse
+ import mozinfo
+
+ def timeout_if(tests, values):
+ for test in tests:
+ if 'timeout-if' in test:
+ timeout, condition = test['timeout-if'].split(',', 1)
+ if parse(condition, **values):
+ test['timeout'] = timeout
+ yield test
+
+ tests = manifest.active_tests(filters=[timeout_if], **mozinfo.info)
+
+Creating Manifests
+``````````````````
+
+manifestparser comes with a console script, `manifestparser create`, that
+may be used to create a seed manifest structure from a directory of
+files. Run `manifestparser help create` for usage information.
+
+Copying Manifests
+`````````````````
+
+To copy tests and manifests from a source:
+
+.. code-block:: text
+
+ manifestparser [options] copy from_manifest to_directory -tag1 -tag2 --key1=value1 --key2=value2 ...
+
+Updating Tests
+``````````````
+
+To update the tests associated with a manifest from a source
+directory:
+
+.. code-block:: text
+
+ manifestparser [options] update manifest from_directory -tag1 -tag2 --key1=value1 --key2=value2 ...
+
+Usage example
+`````````````
+
+Here is an example of how to create manifests for a directory tree and
+update the tests listed in the manifests from an external source.
+
+Creating Manifests
+``````````````````
+
+Let's say you want to make a series of manifests for a given directory structure containing `.js` test files:
+
+.. code-block:: text
+
+ testing/mozmill/tests/firefox/
+ testing/mozmill/tests/firefox/testAwesomeBar/
+ testing/mozmill/tests/firefox/testPreferences/
+ testing/mozmill/tests/firefox/testPrivateBrowsing/
+ testing/mozmill/tests/firefox/testSessionStore/
+ testing/mozmill/tests/firefox/testTechnicalTools/
+ testing/mozmill/tests/firefox/testToolbar/
+ testing/mozmill/tests/firefox/restartTests
+
+You can use `manifestparser create` to do this:
+
+.. code-block:: text
+
+ $ manifestparser help create
+ Usage: manifestparser.py [options] create directory <directory> <...>
+
+ create a manifest from a list of directories
+
+ Options:
+ -p PATTERN, --pattern=PATTERN
+ glob pattern for files
+ -i IGNORE, --ignore=IGNORE
+ directories to ignore
+ -w IN_PLACE, --in-place=IN_PLACE
+ Write .ini files in place; filename to write to
+
+We only want `.js` files and we want to skip the `restartTests` directory.
+We also want to write a manifest per directory, so we use the `--in-place`
+option to write the manifests:
+
+.. code-block:: text
+
+ manifestparser create . -i restartTests -p '*.js' -w manifest.ini
+
+This creates a manifest.ini per directory that we care about with the JS test files:
+
+.. code-block:: text
+
+ testing/mozmill/tests/firefox/manifest.ini
+ testing/mozmill/tests/firefox/testAwesomeBar/manifest.ini
+ testing/mozmill/tests/firefox/testPreferences/manifest.ini
+ testing/mozmill/tests/firefox/testPrivateBrowsing/manifest.ini
+ testing/mozmill/tests/firefox/testSessionStore/manifest.ini
+ testing/mozmill/tests/firefox/testTechnicalTools/manifest.ini
+ testing/mozmill/tests/firefox/testToolbar/manifest.ini
+
+The top-level `manifest.ini` merely has `[include:]` references to the sub manifests:
+
+.. code-block:: text
+
+ [include:testAwesomeBar/manifest.ini]
+ [include:testPreferences/manifest.ini]
+ [include:testPrivateBrowsing/manifest.ini]
+ [include:testSessionStore/manifest.ini]
+ [include:testTechnicalTools/manifest.ini]
+ [include:testToolbar/manifest.ini]
+
+Each sub-level manifest contains the (`.js`) test files relative to it.
+
+Updating the tests from manifests
+`````````````````````````````````
+
+You may need to update tests as given in manifests from a different source directory.
+`manifestparser update` was made for just this purpose:
+
+.. code-block:: text
+
+ Usage: manifestparser [options] update manifest directory -tag1 -tag2 --key1=value1 --key2=value2 ...
+
+ update the tests as listed in a manifest from a directory
+
+To update from a directory of tests in `~/mozmill/src/mozmill-tests/firefox/` run:
+
+.. code-block:: text
+
+ manifestparser update manifest.ini ~/mozmill/src/mozmill-tests/firefox/
+
+Tests
+`````
+
+manifestparser includes a suite of tests:
+
+https://github.com/mozilla/mozbase/tree/master/manifestparser/tests
+
+`test_manifest.txt` is a doctest that may be helpful in figuring out
+how to use the API. Tests are run via `python test.py`.
+
+Bugs
+````
+
+Please file any bugs or feature requests at
+
+https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=ManifestParser
+
+Or contact jhammel@mozilla.org, or ask in #ateam on irc.mozilla.org
+
+CLI
+```
+
+Run `manifestparser help` for usage information.
+
+To create a manifest from a set of directories:
+
+.. code-block:: text
+
+ manifestparser [options] create directory <directory> <...> [create-options]
+
+To output a manifest of tests:
+
+.. code-block:: text
+
+ manifestparser [options] write manifest <manifest> <...> -tag1 -tag2 --key1=value1 --key2=value2 ...
+
+To copy tests and manifests from a source:
+
+.. code-block:: text
+
+ manifestparser [options] copy from_manifest to_manifest -tag1 -tag2 `key1=value1 key2=value2 ...
+
+To update the tests associated with a manifest from a source
+directory:
+
+.. code-block:: text
+
+ manifestparser [options] update manifest from_directory -tag1 -tag2 --key1=value1 --key2=value2 ...
+
+Design Considerations
+`````````````````````
+
+Contrary to some opinion, manifestparser.py and the associated .ini
+format were not magically plucked from the sky but were arrived at
+through several design considerations.
+
+* test manifests should be ordered. While python 2.6 and greater has
+ a ConfigParser that can use an ordered dictionary, it is a
+ requirement that we support python 2.4 for the build + testing
+ environment. To that end, a `read_ini` function was implemented
+ in manifestparser.py that should be the equivalent of the .ini
+ dialect used by ConfigParser.
+
+* the manifest format should be easily human readable/writable. While
+ there was initially some thought of using JSON, there was pushback
+ that JSON was not easily editable. An ideal manifest format would
+ degenerate to a line-separated list of files. While .ini format
+ requires an additional `[]` per line, and while there have been
+ complaints about this, hopefully this is good enough.
+
+* python does not have an in-built YAML parser. Since it was
+ undesirable for manifestparser.py to have any dependencies, YAML was
+ dismissed as a format.
+
+* we could have used a proprietary format but decided against it.
+ Everyone knows .ini and there are good tools to deal with it.
+ In any case, since `read_ini` is the only function that transforms a
+ manifest into a list of key, value pairs, changing the format would
+ have implications for downstream code but should be programmatically
+ simple to do.
+
+* there should be a single file that may easily be
+ transported. Traditionally, test harnesses have lived in
+ mozilla-central. This is less true these days and it is increasingly
+ likely that more tests will not live in mozilla-central going
+ forward. So `manifestparser.py` should be highly consumable. To
+ this end, it is a single file, as appropriate to mozilla-central,
+ which is also a working python package deployed to PyPI for easy
+ installation.
+
+Historical Reference
+````````````````````
+
+Date-ordered list of links about how manifests came to be where they are today:
+
+* https://wiki.mozilla.org/Auto-tools/Projects/UniversalManifest
+* http://alice.nodelman.net/blog/post/2010/05/
+* http://alice.nodelman.net/blog/post/universal-manifest-for-unit-tests-a-proposal/
+* https://elvis314.wordpress.com/2010/07/05/improving-personal-hygiene-by-adjusting-mochitests/
+* https://elvis314.wordpress.com/2010/07/27/types-of-data-we-care-about-in-a-manifest/
+* https://bugzilla.mozilla.org/show_bug.cgi?id=585106
+* http://elvis314.wordpress.com/2011/05/20/converting-xpcshell-from-listing-directories-to-a-manifest/
+* https://bugzilla.mozilla.org/show_bug.cgi?id=616999
+* https://developer.mozilla.org/en/Writing_xpcshell-based_unit_tests#Adding_your_tests_to_the_xpcshell_manifest
diff --git a/testing/mozbase/docs/mozcrash.rst b/testing/mozbase/docs/mozcrash.rst
new file mode 100644
index 000000000..750c46dd8
--- /dev/null
+++ b/testing/mozbase/docs/mozcrash.rst
@@ -0,0 +1,8 @@
+:mod:`mozcrash` --- Print stack traces from minidumps left behind by crashed processes
+======================================================================================
+
+Gets stack traces out of processes that have crashed and left behind
+a minidump file using the Google Breakpad library.
+
+.. automodule:: mozcrash
+ :members: check_for_crashes
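+
+A rough usage sketch (the dump directory, symbols path and test name
+below are hypothetical)::
+
+    import mozcrash
+
+    # returns a true value if any minidumps were found and processed
+    if mozcrash.check_for_crashes('/tmp/dumps',
+                                  symbols_path='/tmp/symbols',
+                                  test_name='test_example'):
+        print "the process under test crashed"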
diff --git a/testing/mozbase/docs/mozdebug.rst b/testing/mozbase/docs/mozdebug.rst
new file mode 100644
index 000000000..6a4be63f4
--- /dev/null
+++ b/testing/mozbase/docs/mozdebug.rst
@@ -0,0 +1,5 @@
+:mod:`mozdebug` --- Configure and launch compatible debuggers.
+======================================================================================
+
+.. automodule:: mozdebug
+ :members: get_debugger_info, get_default_debugger_name, DebuggerSearch
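+
+A brief sketch of typical usage (the application command at the end is
+hypothetical)::
+
+    import mozdebug
+
+    # e.g. 'gdb' on Linux or 'lldb' on OS X, if one is installed
+    name = mozdebug.get_default_debugger_name()
+    info = mozdebug.get_debugger_info(name)
+    if info:
+        # prepend the debugger invocation to the application command line
+        command = [info.path] + info.args + ['./firefox', '-no-remote']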
diff --git a/testing/mozbase/docs/mozdevice.rst b/testing/mozbase/docs/mozdevice.rst
new file mode 100644
index 000000000..5e18b56d2
--- /dev/null
+++ b/testing/mozbase/docs/mozdevice.rst
@@ -0,0 +1,254 @@
+:mod:`mozdevice` --- Interact with remote devices
+=================================================
+
+Mozdevice provides several interfaces to interact with a remote device
+such as an Android- or FirefoxOS-based phone. It allows you to push
+files to these types of devices, launch processes, and more. There are
+currently two available interfaces:
+
+* :ref:`DeviceManager`: Works either via ADB or a custom TCP protocol
+ (the latter requires an agent application running on the device).
+* :ref:`ADB`: Uses the Android Debug Bridge (adb) directly
+
+In general, new code should use the ADB abstraction where possible as
+it is simpler and more reliable.
+
+.. automodule:: mozdevice
+
+.. _DeviceManager:
+
+DeviceManager interface
+-----------------------
+.. autoclass:: DeviceManager
+
+Here's an example script which lists the files in '/mnt/sdcard' and checks if a
+process called 'org.mozilla.fennec' is running. In this example, we're
+instantiating the DeviceManagerADB implementation, but we could just
+as easily have used DeviceManagerSUT (assuming the device had an agent
+running that speaks the SUT protocol).
+
+::
+
+ import mozdevice
+
+ dm = mozdevice.DeviceManagerADB()
+ print dm.listFiles("/mnt/sdcard")
+ if dm.processExist("org.mozilla.fennec"):
+ print "Fennec is running"
+
+Informational methods
+`````````````````````
+.. automethod:: DeviceManager.getInfo(self, directive=None)
+.. automethod:: DeviceManager.getCurrentTime(self)
+.. automethod:: DeviceManager.getIP
+.. automethod:: DeviceManager.saveScreenshot
+.. automethod:: DeviceManager.recordLogcat
+.. automethod:: DeviceManager.getLogcat
+
+File management methods
+```````````````````````
+.. autoattribute:: DeviceManager.deviceRoot
+.. automethod:: DeviceManager.getDeviceRoot(self)
+.. automethod:: DeviceManager.pushFile(self, localFilename, remoteFilename, retryLimit=1)
+.. automethod:: DeviceManager.pushDir(self, localDirname, remoteDirname, retryLimit=1)
+.. automethod:: DeviceManager.pullFile(self, remoteFilename)
+.. automethod:: DeviceManager.getFile(self, remoteFilename, localFilename)
+.. automethod:: DeviceManager.getDirectory(self, remoteDirname, localDirname, checkDir=True)
+.. automethod:: DeviceManager.validateFile(self, remoteFilename, localFilename)
+.. automethod:: DeviceManager.mkDir(self, remoteDirname)
+.. automethod:: DeviceManager.mkDirs(self, filename)
+.. automethod:: DeviceManager.dirExists(self, dirpath)
+.. automethod:: DeviceManager.fileExists(self, filepath)
+.. automethod:: DeviceManager.listFiles(self, rootdir)
+.. automethod:: DeviceManager.removeFile(self, filename)
+.. automethod:: DeviceManager.removeDir(self, remoteDirname)
+.. automethod:: DeviceManager.chmodDir(self, remoteDirname, mask="777")
+.. automethod:: DeviceManager.getTempDir(self)
+
+Process management methods
+``````````````````````````
+.. automethod:: DeviceManager.shell(self, cmd, outputfile, env=None, cwd=None, timeout=None, root=False)
+.. automethod:: DeviceManager.shellCheckOutput(self, cmd, env=None, cwd=None, timeout=None, root=False)
+.. automethod:: DeviceManager.getProcessList(self)
+.. automethod:: DeviceManager.processExist(self, processName)
+.. automethod:: DeviceManager.killProcess(self, processName)
+
+System control methods
+``````````````````````
+.. automethod:: DeviceManager.reboot(self, ipAddr=None, port=30000)
+
+Application management methods
+``````````````````````````````
+.. automethod:: DeviceManager.uninstallAppAndReboot(self, appName, installPath=None)
+.. automethod:: DeviceManager.installApp(self, appBundlePath, destPath=None)
+.. automethod:: DeviceManager.uninstallApp(self, appName, installPath=None)
+.. automethod:: DeviceManager.updateApp(self, appBundlePath, processName=None, destPath=None, ipAddr=None, port=30000)
+
+DeviceManagerADB implementation
+```````````````````````````````
+
+.. autoclass:: mozdevice.DeviceManagerADB
+
+DeviceManagerADB has several methods that are not present in all
+DeviceManager implementations. Please do not use them in code that
+is meant to be interoperable.
+
+.. automethod:: DeviceManagerADB.forward
+.. automethod:: DeviceManagerADB.remount
+.. automethod:: DeviceManagerADB.devices
+
+DeviceManagerSUT implementation
+```````````````````````````````
+
+.. autoclass:: mozdevice.DeviceManagerSUT
+
+DeviceManagerSUT has several methods that are only used in specific
+tests and are not present in all DeviceManager implementations. Please
+do not use them in code that is meant to be interoperable.
+
+.. automethod:: DeviceManagerSUT.unpackFile
+.. automethod:: DeviceManagerSUT.adjustResolution
+
+Android extensions
+``````````````````
+
+For Android, we provide two variants of the `DeviceManager` interface
+with extensions useful for that platform. These classes are called
+DroidADB and DroidSUT. They inherit all methods from DeviceManagerADB
+and DeviceManagerSUT. Here is the interface for DroidADB:
+
+.. automethod:: mozdevice.DroidADB.launchApplication
+.. automethod:: mozdevice.DroidADB.launchFennec
+.. automethod:: mozdevice.DroidADB.getInstalledApps
+.. automethod:: mozdevice.DroidADB.getAppRoot
+
+These methods are also found in the DroidSUT class.
+
+.. _ADB:
+
+ADB Interface
+-------------
+
+The following classes provide a basic interface to interact with the
+Android Debug Bridge (adb) and Android-based devices. It is intended to
+provide the basis for a replacement for DeviceManager and
+DeviceManagerADB.
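+
+A minimal sketch of using these classes (assuming ``adb`` is on the PATH
+and a single device is attached)::
+
+    from mozdevice import ADBHost, ADBDevice
+
+    host = ADBHost()
+    print host.devices()      # attached devices, as reported by adb
+
+    device = ADBDevice()      # picks the single attached device
+    print device.get_prop('ro.build.version.release')
+    print device.shell_output('ls /sdcard')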
+
+ADBCommand
+``````````
+
+.. autoclass:: mozdevice.ADBCommand
+
+.. automethod:: ADBCommand.command(self, cmds, timeout=None)
+.. automethod:: ADBCommand.command_output(self, cmds, timeout=None)
+
+ADBHost
+```````
+.. autoclass:: mozdevice.ADBHost
+
+.. automethod:: ADBHost.command(self, cmds, timeout=None)
+.. automethod:: ADBHost.command_output(self, cmds, timeout=None)
+.. automethod:: ADBHost.start_server(self, timeout=None)
+.. automethod:: ADBHost.kill_server(self, timeout=None)
+.. automethod:: ADBHost.devices(self, timeout=None)
+
+ADBDevice
+`````````
+.. autoclass:: mozdevice.ADBDevice
+
+Host Command methods
+++++++++++++++++++++
+.. automethod:: ADBDevice.command(self, cmds, timeout=None)
+.. automethod:: ADBDevice.command_output(self, cmds, timeout=None)
+
+Device Shell methods
+++++++++++++++++++++
+.. automethod:: ADBDevice.shell(self, cmd, env=None, cwd=None, timeout=None, root=False)
+.. automethod:: ADBDevice.shell_bool(self, cmd, env=None, cwd=None, timeout=None, root=False)
+.. automethod:: ADBDevice.shell_output(self, cmd, env=None, cwd=None, timeout=None, root=False)
+
+Informational methods
++++++++++++++++++++++
+.. automethod:: ADBDevice.clear_logcat
+.. automethod:: ADBDevice.get_battery_percentage
+.. automethod:: ADBDevice.get_info
+.. automethod:: ADBDevice.get_logcat
+.. automethod:: ADBDevice.get_prop
+.. automethod:: ADBDevice.get_state
+
+System control methods
+++++++++++++++++++++++
+.. automethod:: ADBDevice.is_device_ready
+.. automethod:: ADBDevice.reboot
+
+File management methods
++++++++++++++++++++++++
+.. automethod:: ADBDevice.chmod
+.. automethod:: ADBDevice.cp
+.. automethod:: ADBDevice.exists
+.. automethod:: ADBDevice.is_dir
+.. automethod:: ADBDevice.is_file
+.. automethod:: ADBDevice.list_files
+.. automethod:: ADBDevice.mkdir
+.. automethod:: ADBDevice.mv
+.. automethod:: ADBDevice.push
+.. automethod:: ADBDevice.rm
+.. automethod:: ADBDevice.rmdir
+.. autoattribute:: ADBDevice.test_root
+
+Process management methods
+++++++++++++++++++++++++++
+.. automethod:: ADBDevice.get_process_list
+.. automethod:: ADBDevice.kill
+.. automethod:: ADBDevice.pkill
+.. automethod:: ADBDevice.process_exist
+
+ADBAndroid
+``````````
+.. autoclass:: ADBAndroid
+
+Informational methods
++++++++++++++++++++++
+.. automethod:: ADBAndroid.get_battery_percentage
+
+System control methods
+++++++++++++++++++++++
+.. automethod:: ADBAndroid.is_device_ready
+.. automethod:: ADBAndroid.power_on
+
+Application management methods
+++++++++++++++++++++++++++++++
+.. automethod:: ADBAndroid.install_app
+.. automethod:: ADBAndroid.is_app_installed
+.. automethod:: ADBAndroid.launch_application
+.. automethod:: ADBAndroid.launch_fennec
+.. automethod:: ADBAndroid.stop_application
+.. automethod:: ADBAndroid.uninstall_app
+.. automethod:: ADBAndroid.update_app
+
+ADBB2G
+``````
+.. autoclass:: ADBB2G
+
+Informational methods
++++++++++++++++++++++
+.. automethod:: ADBB2G.get_battery_percentage
+.. automethod:: ADBB2G.get_info
+.. automethod:: ADBB2G.get_memory_total
+
+ADBProcess
+``````````
+.. autoclass:: mozdevice.ADBProcess
+
+ADBError
+````````
+.. autoexception:: mozdevice.ADBError
+
+ADBRootError
+````````````
+.. autoexception:: mozdevice.ADBRootError
+
+ADBTimeoutError
+```````````````
+.. autoexception:: mozdevice.ADBTimeoutError
+
diff --git a/testing/mozbase/docs/mozfile.rst b/testing/mozbase/docs/mozfile.rst
new file mode 100644
index 000000000..52e1cd4c9
--- /dev/null
+++ b/testing/mozbase/docs/mozfile.rst
@@ -0,0 +1,10 @@
+:mod:`mozfile` --- File utilities for use in Mozilla testing
+============================================================
+
+mozfile is a convenience library for taking care of some common file-related
+tasks in automated testing, such as extracting files or recursively removing
+directories.
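+
+A rough sketch of typical usage (the file names are hypothetical)::
+
+    import mozfile
+
+    # unpack an archive (zip or tarball) into a target directory
+    mozfile.extract('build.zip', 'extracted')
+
+    # recursively remove a file or directory
+    mozfile.remove('extracted')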
+
+.. automodule:: mozfile
+ :members: extract, extract_tarball, extract_zip, move, remove
+
diff --git a/testing/mozbase/docs/mozhttpd.rst b/testing/mozbase/docs/mozhttpd.rst
new file mode 100644
index 000000000..f6ceddb37
--- /dev/null
+++ b/testing/mozbase/docs/mozhttpd.rst
@@ -0,0 +1,22 @@
+
+Serving up content to be consumed by the browser
+================================================
+
+
+.. warning:: The mozhttpd module is considered obsolete. For new code,
+ please use wptserve_ which can do everything mozhttpd does
+ and more.
+
+.. _wptserve: https://pypi.python.org/pypi/wptserve
+
+:mod:`mozhttpd` --- Simple webserver
+------------------------------------
+
+.. automodule:: mozhttpd
+ :members:
+
+Interface
+`````````
+
+.. autoclass:: MozHttpd
+ :members:
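+
+A small usage sketch, serving the current directory on an OS-assigned
+port::
+
+    import mozhttpd
+
+    httpd = mozhttpd.MozHttpd(port=0, docroot='.')
+    httpd.start(block=False)
+    print httpd.get_url()   # base URL the server is listening on
+    # ... exercise the server ...
+    httpd.stop()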
diff --git a/testing/mozbase/docs/mozinfo.rst b/testing/mozbase/docs/mozinfo.rst
new file mode 100644
index 000000000..2f9eb5f7e
--- /dev/null
+++ b/testing/mozbase/docs/mozinfo.rst
@@ -0,0 +1,71 @@
+:mod:`mozinfo` --- Get system information
+=========================================
+
+Throughout `mozmill <https://developer.mozilla.org/en/Mozmill>`_
+and other Mozilla python code, checking the underlying
+platform is done in many different ways. The various checks needed
+lead to a lot of copy+pasting, leaving the reader to wonder: is this
+specific check necessary for (e.g.) this operating system? Because
+information is not consolidated, checks are not done consistently, nor
+is it defined what we are checking for.
+
+`mozinfo <https://github.com/mozilla/mozbase/tree/master/mozinfo>`_
+proposes to solve this problem. mozinfo is a bridge interface,
+making the underlying (complex) plethora of OS and architecture
+combinations conform to a subset of values of relevance to
+Mozilla software. The current implementation exposes relevant keys and
+values such as: ``os``, ``version``, ``bits``, and ``processor``. Additionally, the
+service pack in use is available on the Windows platform.
+
+
+API Usage
+---------
+
+mozinfo is a python package. Downloading the software and running
+``python setup.py develop`` will allow you to do ``import mozinfo``
+from python.
+`mozinfo.py <https://raw.github.com/mozilla/mozbase/master/mozinfo/mozinfo/mozinfo.py>`_
+is the only file contained in this package,
+so if you need a single-file solution, you can just download or call
+this file through the web.
+
+The top level attributes (``os``, ``version``, ``bits``, ``processor``) are
+available as module globals::
+
+ if mozinfo.os == 'win': ...
+
+In addition, mozinfo exports a dictionary, ``mozinfo.info``, that
+contains these values. mozinfo also exports:
+
+- ``choices``: a dictionary of possible values for os, bits, and
+ processor
+- ``main``: the console_script entry point for mozinfo
+- ``unknown``: a singleton denoting a value that cannot be determined
+
+``unknown`` has the string representation ``"UNKNOWN"``.
+``unknown`` will evaluate as ``False`` in python::
+
+ if not mozinfo.os: ... # unknown!
+
+
+Command Line Usage
+------------------
+
+mozinfo comes with a command line program, ``mozinfo``, which may be used to
+diagnose one's current system.
+
+Example output::
+
+ os: linux
+ version: Ubuntu 10.10
+ bits: 32
+ processor: x86
+
+Three of these fields, os, bits, and processor, have a finite set of
+choices. You may display the value of these choices using
+``mozinfo --os``, ``mozinfo --bits``, and ``mozinfo --processor``.
+``mozinfo --help`` documents command-line usage.
+
+
+.. automodule:: mozinfo
+ :members:
diff --git a/testing/mozbase/docs/mozinstall.rst b/testing/mozbase/docs/mozinstall.rst
new file mode 100644
index 000000000..7db40d73d
--- /dev/null
+++ b/testing/mozbase/docs/mozinstall.rst
@@ -0,0 +1,29 @@
+:mod:`mozinstall` --- Install and uninstall Gecko-based applications
+====================================================================
+
+mozinstall is a small python module with several convenience methods
+useful for installing and uninstalling a gecko-based application
+(e.g. Firefox) on the desktop.
+
+Simple example
+--------------
+
+::
+
+ import mozinstall
+ import tempfile
+
+ tempdir = tempfile.mkdtemp()
+ firefox_dmg = 'firefox-38.0a1.en-US.mac.dmg'
+ install_folder = mozinstall.install(src=firefox_dmg, dest=tempdir)
+ binary = mozinstall.get_binary(install_folder, 'Firefox')
+ # from here you can execute the binary directly
+ # ...
+ mozinstall.uninstall(install_folder)
+
+API Documentation
+-----------------
+
+.. automodule:: mozinstall
+ :members: is_installer, install, get_binary, uninstall,
+ InstallError, InvalidBinary, InvalidSource
diff --git a/testing/mozbase/docs/mozlog.rst b/testing/mozbase/docs/mozlog.rst
new file mode 100644
index 000000000..db26c78b0
--- /dev/null
+++ b/testing/mozbase/docs/mozlog.rst
@@ -0,0 +1,486 @@
+:mod:`mozlog` --- Structured logging for test output
+===============================================================
+
+:py:mod:`mozlog` is a library designed for logging the
+execution and results of test harnesses. The internal data model is a
+stream of JSON-compatible objects, with one object per log entry. The
+default output format is line-based, with one JSON object serialized
+per line.
+
+:py:mod:`mozlog` is *not* based on the stdlib logging
+module, although it shares several concepts with it.
+
+One notable difference between this module and the standard logging
+module is the way that loggers are created. The structured logging
+module does not require that loggers with a specific name are
+singleton objects accessed through a factory function. Instead the
+``StructuredLogger`` constructor may be used directly. However all
+loggers with the same name share the same internal state (the "Borg"
+pattern). In particular the list of handler functions is the same for
+all loggers with the same name.
+
+Typically, you would only instantiate one logger object per
+program. Two convenience methods are provided to set and get the
+default logger in the program.
+
+Logging is threadsafe, with access to handlers protected by a
+``threading.Lock``. However it is `not` process-safe. This means that
+applications using multiple processes, e.g. via the
+``multiprocessing`` module, should arrange for all logging to happen in
+a single process.
+
+Data Format
+-----------
+
+Structured loggers produce messages in a simple format designed to be
+compatible with the JSON data model. Each message is a single object,
+with the type of message indicated by the ``action`` key. It is
+intended that the set of ``action`` values be closed; where there are
+use cases for additional values they should be integrated into this
+module rather than extended in an ad-hoc way. The set of keys present
+on all messages is:
+
+``action``
+ The type of the message (string).
+
+``time``
+ The timestamp of the message in ms since the epoch (int).
+
+``thread``
+ The name of the thread emitting the message (string).
+
+``pid``
+ The pid of the process creating the message (int).
+
+``source``
+ Name of the logger creating the message (string).
+
+For each ``action`` there is a further set of specific fields
+describing the details of the event that caused the message to be
+emitted:
+
+``suite_start``
+ Emitted when the testsuite starts running.
+
+ ``tests``
+ A list of test ids. Test ids can either be strings or lists of
+ strings (an example of the latter is reftests where the id has the
+ form [test_url, ref_type, ref_url]) and are assumed to be unique
+ within a given testsuite. In cases where the test list is not
+ known upfront an empty list may be passed (list).
+
+ ``run_info``
+ An optional dictionary describing the properties of the
+ build and test environment. This contains the information provided
+ by :doc:`mozinfo <mozinfo>`, plus a boolean ``debug`` field indicating
+ whether the build under test is a debug build.
+
+``suite_end``
+ Emitted when the testsuite is finished and no more results will be produced.
+
+``test_start``
+ Emitted when a test is being started.
+
+ ``test``
+ A unique id for the test (string or list of strings).
+
+ ``path``
+ Optional path to the test relative to some base (typically the root of the
+ source tree). Mainly used when ``test`` id is not a path (string).
+
+``test_status``
+ Emitted for a test which has subtests to record the result of a
+ single subtest.
+
+ ``test``
+ The same unique id for the test as in the ``test_start`` message.
+
+ ``subtest``
+ Name of the subtest (string).
+
+ ``status``
+ Result of the test (string enum; ``PASS``, ``FAIL``, ``TIMEOUT``,
+ ``NOTRUN``)
+
+ ``expected``
+ Expected result of the test. Omitted if the expected result is the
+ same as the actual result (string enum, same as ``status``).
+
+``test_end``
+ Emitted to give the result of a test with no subtests, or the status
+ of the overall file when there are subtests.
+
+ ``test``
+ The same unique id for the test as in the ``test_start`` message.
+
+ ``status``
+ Either the result of the test, if there are no subtests, in which
+ case the value is one of (string enum ``PASS``, ``FAIL``, ``TIMEOUT``,
+ ``CRASH``, ``ASSERT``, ``SKIP``); or the status of the overall file
+ where there are subtests (string enum ``OK``, ``ERROR``, ``TIMEOUT``,
+ ``CRASH``, ``ASSERT``, ``SKIP``).
+
+ ``expected``
+ The expected status, or omitted if the expected status matches the
+ actual status (string enum, same as ``status``).
+
+``process_output``
+ Output from a managed subprocess.
+
+ ``process``
+ pid of the subprocess.
+
+ ``command``
+ Command used to launch the subprocess.
+
+ ``data``
+ Data output by the subprocess.
+
+``log``
+ General human-readable logging message, used to debug the harnesses
+ themselves rather than to provide input to other tools.
+
+ ``level``
+ Level of the log message (string enum ``CRITICAL``, ``ERROR``,
+ ``WARNING``, ``INFO``, ``DEBUG``).
+
+ ``message``
+ Text of the log message.
+
+Testsuite Protocol
+------------------
+
+When used for testsuites, the following structured logging messages must be emitted:
+
+ * One ``suite_start`` message before any ``test_*`` messages
+
+ * One ``test_start`` message per test that is run
+
+ * One ``test_status`` message per subtest that is run. This might be
+ zero if the test type doesn't have the notion of subtests.
+
+ * One ``test_end`` message per test that is run, after the
+ ``test_start`` and any ``test_status`` messages for that same test.
+
+ * One ``suite_end`` message after all ``test_*`` messages have been
+ emitted.
+
+The above mandatory events may be interspersed with ``process_output``
+and ``log`` events, as required.
+
+Subtests
+~~~~~~~~
+
+The purpose of subtests is to deal with situations where a single test
+produces more than one result, and the exact details of the number of
+results are not known ahead of time. For example, consider a test
+harness that loads JavaScript-based tests in a browser. Each url
+loaded would be a single test, with corresponding ``test_start`` and
+``test_end`` messages. If there can be more than one JS-defined test
+on a page, however, it is useful to track the results of those tests
+separately. Therefore each of those tests is a subtest, and one
+``test_status`` message must be generated for each subtest result.
+
+Subtests must have a name that is unique within their parent test.
+
+Whether or not a test has subtests changes the meaning of the
+``status`` property on the test itself. When the test does not have
+any subtests, this property is the actual test result such as ``PASS``
+or ``FAIL``. When a test does have subtests, the test itself does not
+have a result as-such; it isn't meaningful to describe it as having a
+``PASS`` result, especially if the subtests did not all pass. Instead
+this property is used to hold information about whether the test ran
+without error. If no errors were detected the test must be given the
+status ``OK``. Otherwise the test may get the status ``ERROR`` (for
+e.g. uncaught JS exceptions), ``TIMEOUT`` (if no results were reported
+in the allowed time) or ``CRASH`` (if the test caused the process
+under test to crash).
+
+StructuredLogger Objects
+------------------------
+
+.. automodule:: mozlog.structuredlog
+ :members: set_default_logger, get_default_logger
+
+.. autoclass:: StructuredLogger
+ :members: add_handler, remove_handler, handlers, suite_start,
+ suite_end, test_start, test_status, test_end,
+ process_output, critical, error, warning, info, debug
+
+.. autoclass:: StructuredLogFileLike
+ :members:
+
+ProxyLogger Objects
+-------------------
+
+Since :func:`mozlog.structuredlog.get_default_logger` returns None when
+the default logger is not initialized, it is not possible to directly
+use it at the module level.
+
+With ProxyLogger, it is possible to write the following code: ::
+
+ from mozlog import get_proxy_logger
+
+ LOG = get_proxy_logger('component_name')
+
+
+ def my_function():
+ LOG.info('logging with a module level object')
+
+
+.. note::
+
+ mozlog still needs to be initialized before the first call to a
+ ProxyLogger instance occurs, for example with
+ :func:`mozlog.commandline.setup_logging`.
+
+.. automodule:: mozlog.proxy
+ :members: get_proxy_logger, ProxyLogger
+
+Handlers
+--------
+
+A handler is a callable that is called for each log message produced
+and is responsible for processing that message. The typical example
+of this is a ``StreamHandler``, which takes a log message, invokes a
+formatter that converts the log entry to a string, and writes it to a
+file.
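+
+For example, to restrict human-readable output to warnings and above, a
+handler can be wrapped in a ``LogLevelFilter`` (a minimal sketch)::
+
+    import sys
+
+    from mozlog.structuredlog import StructuredLogger
+    from mozlog.handlers import StreamHandler, LogLevelFilter
+    from mozlog.formatters import MachFormatter
+
+    logger = StructuredLogger("example")
+    logger.add_handler(
+        LogLevelFilter(StreamHandler(sys.stderr, MachFormatter()), "WARNING"))
+    logger.warning("this is shown")
+    logger.debug("this is filtered out")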
+
+.. automodule:: mozlog.handlers
+
+.. autoclass:: BaseHandler
+ :members:
+
+.. autoclass:: StreamHandler
+ :members:
+
+.. autoclass:: LogLevelFilter
+ :members:
+
+.. autoclass:: BufferHandler
+ :members:
+
+Formatters
+----------
+
+Formatters are callables that take a log message, and return either a
+string representation of that message, or ``None`` if that message
+should not appear in the output. This allows formatters to both
+exclude certain items and create internal buffers of the output so
+that, for example, a single string might be returned for a
+``test_end`` message indicating the overall result of the test,
+including data provided in the ``test_status`` messages.
+
+Formatter modules are written so that they can take raw input on stdin
+and write formatted output on stdout. This allows the formatters to be
+invoked as part of a command line for post-processing raw log files.
+
+.. automodule:: mozlog.formatters.base
+
+.. autoclass:: BaseFormatter
+ :members:
+
+.. automodule:: mozlog.formatters.unittest
+
+.. autoclass:: UnittestFormatter
+ :members:
+
+.. automodule:: mozlog.formatters.xunit
+
+.. autoclass:: XUnitFormatter
+ :members:
+
+.. automodule:: mozlog.formatters.html
+
+.. autoclass:: HTMLFormatter
+ :members:
+
+.. automodule:: mozlog.formatters.machformatter
+
+.. autoclass:: MachFormatter
+ :members:
+
+.. automodule:: mozlog.formatters.tbplformatter
+
+.. autoclass:: TbplFormatter
+ :members:
+
+Processing Log Files
+--------------------
+
+The ``mozlog.reader`` module provides utilities for working
+with structured log files.
+
+.. automodule:: mozlog.reader
+ :members:
+
+Integration with argparse
+-------------------------
+
+The `mozlog.commandline` module provides integration with the `argparse`
+module to provide uniform logging-related command line arguments to programs
+using `mozlog`. Each known formatter gets a command line argument of the form
+``--log-{name}``, which takes the name of a file to log to with that format,
+or ``-`` to indicate stdout.
+
+.. automodule:: mozlog.commandline
+ :members:
+
+Simple Examples
+---------------
+
+Log to stdout::
+
+ import sys
+ from mozlog import structuredlog
+ from mozlog import handlers, formatters
+ logger = structuredlog.StructuredLogger("my-test-suite")
+ logger.add_handler(handlers.StreamHandler(sys.stdout,
+ formatters.JSONFormatter()))
+ logger.suite_start(["test-id-1"])
+ logger.test_start("test-id-1")
+ logger.info("This is a message with action='LOG' and level='INFO'")
+ logger.test_status("test-id-1", "subtest-1", "PASS")
+ logger.test_end("test-id-1", "OK")
+ logger.suite_end()
+
+
+Populate an ``argparse.ArgumentParser`` with logging options, and
+create a logger based on the value of those options, defaulting to
+JSON output on stdout if nothing else is supplied::
+
+ import argparse
+ import sys
+ from mozlog import commandline
+
+ parser = argparse.ArgumentParser()
+ # Here one would populate the parser with other options
+ commandline.add_logging_group(parser)
+
+ args = parser.parse_args()
+ logger = commandline.setup_logging("testsuite-name", args, {"raw": sys.stdout})
+
+Count the number of tests that timed out in a testsuite::
+
+ from mozlog import reader
+
+ count = 0
+
+ def handle_test_end(data):
+ global count
+ if data["status"] == "TIMEOUT":
+ count += 1
+
+ reader.each_log(reader.read("my_test_run.log"),
+ {"test_end": handle_test_end})
+
+ print count
+
+More Complete Example
+---------------------
+
+This example shows a complete toy testharness set up to use
+structured logging. It is available as `structured_example.py <_static/structured_example.py>`_:
+
+.. literalinclude:: _static/structured_example.py
+
+Each global function with a name starting
+``test_`` represents a test. A passing test returns without
+throwing. A failing test throws a :py:class:`TestAssertion` exception
+via the :py:func:`assert_equals` function. Throwing anything else is
+considered an error in the test. There is also a :py:func:`expected`
+decorator that is used to annotate tests that are expected to do
+something other than pass.
+
+The main entry point to the test runner is via the :py:func:`main`
+function. This is responsible for parsing command line
+arguments, and initiating the test run. Although the test harness
+itself does not provide any command line arguments, the
+:py:class:`ArgumentParser` object is populated by
+:py:meth:`commandline.add_logging_group`, which provides a generic
+set of structured logging arguments appropriate to all tools producing
+structured logging.
+
+The values of these command line arguments are used to create a
+:py:class:`mozlog.StructuredLogger` object populated with the
+specified handlers and formatters in
+:py:func:`commandline.setup_logging`. The third argument to this
+function is the default arguments to use. In this case the default
+is to output raw (i.e. JSON-formatted) logs to stdout.
+
+The main test harness is provided by the :py:class:`TestRunner`
+class. This class is responsible for scheduling all the tests and
+logging all the results. It is passed the :py:obj:`logger` object
+created from the command line arguments. The :py:meth:`run` method
+starts the test run. Before the run is started it logs a
+``suite_start`` message containing the id of each test that will run,
+and after the testrun is done it logs a ``suite_end`` message.
+
+Individual tests are run in the :py:meth:`run_test` method. For each
+test this logs a ``test_start`` message. It then runs the test and
+logs a ``test_end`` message containing the test name, status, expected
+status, and any informational message about the reason for the
+result. In this test harness there are no subtests, so the
+``test_end`` message has the status of the test and there are no
+``test_status`` messages.
+
+Example Output
+~~~~~~~~~~~~~~
+
+When run without providing any command line options, the raw
+structured log messages are sent to stdout::
+
+ $ python structured_example.py
+
+ {"source": "structured-example", "tests": ["test_that_has_an_error", "test_that_fails", "test_expected_fail", "test_that_passes"], "thread": "MainThread", "time": 1401446682787, "action": "suite_start", "pid": 18456}
+ {"source": "structured-example", "thread": "MainThread", "time": 1401446682787, "action": "log", "message": "Running tests", "level": "INFO", "pid": 18456}
+ {"source": "structured-example", "test": "test_that_has_an_error", "thread": "MainThread", "time": 1401446682787, "action": "test_start", "pid": 18456}
+ {"status": "ERROR", "thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_that_has_an_error", "time": 1401446682788, "action": "test_end", "message": "Traceback (most recent call last):\n File \"structured_example.py\", line 61, in run_test\n func()\n File \"structured_example.py\", line 31, in test_that_has_an_error\n assert_equals(2, 1 + \"1\")\nTypeError: unsupported operand type(s) for +: 'int' and 'str'\n", "expected": "PASS"}
+ {"source": "structured-example", "test": "test_that_fails", "thread": "MainThread", "time": 1401446682788, "action": "test_start", "pid": 18456}
+ {"status": "FAIL", "thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_that_fails", "time": 1401446682788, "action": "test_end", "message": "1 not equal to 2", "expected": "PASS"}
+ {"source": "structured-example", "test": "test_expected_fail", "thread": "MainThread", "time": 1401446682788, "action": "test_start", "pid": 18456}
+ {"status": "FAIL", "thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_expected_fail", "time": 1401446682788, "action": "test_end", "message": "4 not equal to 5"}
+ {"source": "structured-example", "test": "test_that_passes", "thread": "MainThread", "time": 1401446682788, "action": "test_start", "pid": 18456}
+ {"status": "PASS", "source": "structured-example", "test": "test_that_passes", "thread": "MainThread", "time": 1401446682789, "action": "test_end", "pid": 18456}
+ {"action": "suite_end", "source": "structured-example", "pid": 18456, "thread": "MainThread", "time": 1401446682789}
+
+The structured logging module provides a number of command line
+options::
+
+ $ python structured_example.py --help
+
+ usage: structured_example.py [-h] [--log-unittest LOG_UNITTEST]
+ [--log-raw LOG_RAW] [--log-html LOG_HTML]
+ [--log-xunit LOG_XUNIT]
+ [--log-mach LOG_MACH]
+
+ optional arguments:
+ -h, --help show this help message and exit
+
+ Output Logging:
+ Options for logging output. Each option represents a possible logging
+ format and takes a filename to write that format to, or '-' to write to
+ stdout.
+
+ --log-unittest LOG_UNITTEST
+ Unittest style output
+ --log-raw LOG_RAW Raw structured log messages
+ --log-html LOG_HTML HTML report
+ --log-xunit LOG_XUNIT
+ xUnit compatible XML
+ --log-mach LOG_MACH Human-readable output
+
+In order to get human-readable output on stdout and the structured log
+data to go to the file ``structured.log``, we would run::
+
+ $ python structured_example.py --log-mach=- --log-raw=structured.log
+
+ 0:00.00 SUITE_START: MainThread 4
+ 0:01.00 LOG: MainThread INFO Running tests
+ 0:01.00 TEST_START: MainThread test_that_has_an_error
+ 0:01.00 TEST_END: MainThread Harness status ERROR, expected PASS. Subtests passed 0/0. Unexpected 1
+ 0:01.00 TEST_START: MainThread test_that_fails
+ 0:01.00 TEST_END: MainThread Harness status FAIL, expected PASS. Subtests passed 0/0. Unexpected 1
+ 0:01.00 TEST_START: MainThread test_expected_fail
+ 0:02.00 TEST_END: MainThread Harness status FAIL. Subtests passed 0/0. Unexpected 0
+ 0:02.00 TEST_START: MainThread test_that_passes
+ 0:02.00 TEST_END: MainThread Harness status PASS. Subtests passed 0/0. Unexpected 0
+ 0:02.00 SUITE_END: MainThread
diff --git a/testing/mozbase/docs/moznetwork.rst b/testing/mozbase/docs/moznetwork.rst
new file mode 100644
index 000000000..d6ed54b85
--- /dev/null
+++ b/testing/mozbase/docs/moznetwork.rst
@@ -0,0 +1,9 @@
+:mod:`moznetwork` --- Get network information
+=============================================
+
+.. automodule:: moznetwork
+
+ .. automethod:: moznetwork.get_ip
+
+ .. autoclass:: moznetwork.NetworkError
+
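+A quick sketch of typical usage::
+
+    import moznetwork
+
+    try:
+        ip = moznetwork.get_ip()
+        print "local IP address: %s" % ip
+    except moznetwork.NetworkError:
+        print "unable to determine the local IP address"
+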
diff --git a/testing/mozbase/docs/mozprocess.rst b/testing/mozbase/docs/mozprocess.rst
new file mode 100644
index 000000000..5cd23ad0d
--- /dev/null
+++ b/testing/mozbase/docs/mozprocess.rst
@@ -0,0 +1,324 @@
+:mod:`mozprocess` --- Launch and manage processes
+=================================================
+
+Mozprocess is a process-handling module that provides some additional
+features beyond those available with python's subprocess:
+
+* better handling of child processes, especially on Windows
+* the ability to timeout the process after some absolute period, or some
+ period without any data written to stdout/stderr
+* the ability to specify output handlers that will be called
+ for each line of output produced by the process
+* the ability to specify handlers that will be called on process timeout
+ and normal process termination
+
+Running a process
+-----------------
+
+mozprocess consists of two classes: ProcessHandler, which inherits from ProcessHandlerMixin.
+
+Let's see how to run a process.
+First, the class should be instantiated with at least one argument which is a command (or a list formed by the command followed by its arguments).
+Then the process can be launched using the *run()* method.
+Finally the *wait()* method will wait until the end of execution.
+
+.. code-block:: python
+
+ from mozprocess import processhandler
+
+ # under Windows replace by command = ['dir', '/a']
+ command = ['ls', '-l']
+ p = processhandler.ProcessHandler(command)
+ print("execute command: %s" % p.commandline)
+ p.run()
+ p.wait()
+
+Note that using *ProcessHandler* instead of *ProcessHandlerMixin* will print the output of the executed command. The attribute *commandline* provides the launched command.
+
+Collecting process output
+-------------------------
+
+Let's now consider a basic shell script that will print numbers from 1 to 5 waiting 1 second between each.
+This script will be used as a command to launch in further examples.
+
+**proc_sleep_echo.sh**:
+
+.. code-block:: sh
+
+ #!/bin/sh
+
+ for i in 1 2 3 4 5
+ do
+ echo $i
+ sleep 1
+ done
+
+If you are running under Windows, you won't be able to use the previous script (unless using Cygwin).
+So you'll use the following script:
+
+**proc_sleep_echo.bat**:
+
+.. code-block:: bat
+
+ @echo off
+ FOR %%A IN (1 2 3 4 5) DO (
+ ECHO %%A
+ REM if you have TIMEOUT then use it instead of PING
+ REM TIMEOUT /T 1 /NOBREAK
+ PING -n 2 127.0.0.1 > NUL
+ )
+
+Mozprocess allows the specification of custom output handlers to gather process output while running.
+ProcessHandler will by default write all output to stdout. You can also provide (to ProcessHandler or ProcessHandlerMixin) a function or a list of functions that will be used as callbacks on each output line generated by the process.
+
+In the following example the command's output will be stored in a file *output.log* and printed in stdout:
+
+.. code-block:: python
+
+ import sys
+ from mozprocess import processhandler
+
+ fd = open('output.log', 'w')
+
+ def tostdout(line):
+ sys.stdout.write("<%s>\n" % line)
+
+ def tofile(line):
+ fd.write("<%s>\n" % line)
+
+ # under Windows you'll replace by 'proc_sleep_echo.bat'
+ command = './proc_sleep_echo.sh'
+ outputs = [tostdout, tofile]
+
+ p = processhandler.ProcessHandlerMixin(command, processOutputLine=outputs)
+ p.run()
+ p.wait()
+
+ fd.close()
+
+The process output can be saved (*obj = ProcessHandler(..., storeOutput=True)*) so that it is possible to request it (*obj.output*) at any time. Note that the default value for *storeOutput* is *True*, so it is not necessary to provide it in the parameters.
+
+.. code-block:: python
+
+ import time
+ import sys
+ from mozprocess import processhandler
+
+ command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat'
+
+ p = processhandler.ProcessHandler(command, storeOutput=True)
+ p.run()
+ for i in xrange(10):
+ print(p.output)
+ time.sleep(0.5)
+ p.wait()
+
+In the previous example, you will see the *p.output* list growing.
+
+Execution
+---------
+
+Status
+``````
+
+It is possible to query the status of the process via *poll()*, which will return None if the process is still running, 0 if it ended without failures, and a negative value if it was killed by a signal (Unix-only).
+
+.. code-block:: python
+
+ import time
+ import signal
+ from mozprocess import processhandler
+
+ command = './proc_sleep_echo.sh'
+ p = processhandler.ProcessHandler(command)
+ p.run()
+ time.sleep(2)
+ print("poll status: %s" % p.poll())
+ time.sleep(1)
+ p.kill(signal.SIGKILL)
+ print("poll status: %s" % p.poll())
+
+Timeout
+```````
+
+A timeout can be provided to the *run()* method. If the process lasts more than *timeout* seconds, it will be stopped.
+
+After execution, the property *timedOut* will be set to True if a timeout was reached.
+
+It is also possible to provide functions (*obj = ProcessHandler[Mixin](..., onTimeout=functions)*) that will be called if the timeout was reached.
+
+.. code-block:: python
+
+ from mozprocess import processhandler
+
+ def ontimeout():
+ print("REACHED TIMEOUT")
+
+ command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat'
+ functions = [ontimeout]
+ p = processhandler.ProcessHandler(command, onTimeout=functions)
+ p.run(timeout=2)
+ p.wait()
+ print("timedOut = %s" % p.timedOut)
+
+By default the process will be killed on timeout but it is possible to prevent this by setting *kill_on_timeout* to *False*.
+
+.. code-block:: python
+
+ p = processhandler.ProcessHandler(command, onTimeout=functions, kill_on_timeout=False)
+ p.run(timeout=2)
+ p.wait()
+ print("timedOut = %s" % p.timedOut)
+
+In this case, no output will be available after the timeout, but the process will still be running.
+
+Waiting
+```````
+
+It is possible to wait until the process exits as already seen with the method *wait()*, or until the end of a timeout if given. Note that in the latter case the process is still alive after the timeout.
+
+.. code-block:: python
+
+ command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat'
+ p = processhandler.ProcessHandler(command)
+ p.run()
+ p.wait(timeout=2)
+ print("timedOut = %s" % p.timedOut)
+ p.wait()
+
+Killing
+```````
+
+You can request to kill the process with the method *kill*. If the parameter "ignore_children" is set to False when the process handler class is initialized, all the process's children will be killed as well.
+
+Except on Windows, you can specify the signal with which to kill the process (e.g.: *kill(signal.SIGKILL)*).
+
+.. code-block:: python
+
+ import time
+ from mozprocess import processhandler
+
+ command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat'
+ p = processhandler.ProcessHandler(command)
+ p.run()
+ time.sleep(2)
+ p.kill()
+
+End of execution
+````````````````
+
+You can provide a function or a list of functions to call at the end of the process using the initialization parameter *onFinish*.
+
+.. code-block:: python
+
+ from mozprocess import processhandler
+
+ def finish():
+ print("Finished!!")
+
+ command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat'
+
+ p = processhandler.ProcessHandler(command, onFinish=finish)
+ p.run()
+ p.wait()
+
+Child management
+----------------
+
+Consider the following scripts:
+
+**proc_child.sh**:
+
+.. code-block:: sh
+
+ #!/bin/sh
+ for i in a b c d e
+ do
+ echo $i
+ sleep 1
+ done
+
+**proc_parent.sh**:
+
+.. code-block:: sh
+
+ #!/bin/sh
+ ./proc_child.sh
+ for i in 1 2 3 4 5
+ do
+ echo $i
+ sleep 1
+ done
+
+For windows users consider:
+
+**proc_child.bat**:
+
+.. code-block:: bat
+
+ @echo off
+ FOR %%A IN (a b c d e) DO (
+ ECHO %%A
+ REM TIMEOUT /T 1 /NOBREAK
+ PING -n 2 127.0.0.1 > NUL
+ )
+
+**proc_parent.bat**:
+
+.. code-block:: bat
+
+ @echo off
+ call proc_child.bat
+ FOR %%A IN (1 2 3 4 5) DO (
+ ECHO %%A
+ REM TIMEOUT /T 1 /NOBREAK
+ PING -n 2 127.0.0.1 > NUL
+ )
+
+For processes that launch other processes, mozprocess allows you to get child running status, wait for child termination, and kill children.
+
+Ignoring children
+`````````````````
+
+By default the *ignore_children* option is False. In that case, killing the main process will kill all its children at the same time.
+
+.. code-block:: python
+
+ import time
+ from mozprocess import processhandler
+
+ def finish():
+ print("Finished")
+
+ command = './proc_parent.sh'
+ p = processhandler.ProcessHandler(command, ignore_children=False, onFinish=finish)
+ p.run()
+ time.sleep(2)
+ print("kill")
+ p.kill()
+
+If *ignore_children* is set to *True*, killing applies only to the main process, which will wait for its children to finish before stopping (join).
+
+.. code-block:: python
+
+ import time
+ from mozprocess import processhandler
+
+ def finish():
+ print("Finished")
+
+ command = './proc_parent.sh'
+ p = processhandler.ProcessHandler(command, ignore_children=True, onFinish=finish)
+ p.run()
+ time.sleep(2)
+ print("kill")
+ p.kill()
+
+API Documentation
+-----------------
+
+.. module:: mozprocess
+.. autoclass:: ProcessHandlerMixin
+ :members: __init__, timedOut, commandline, run, kill, processOutputLine, onTimeout, onFinish, wait
+.. autoclass:: ProcessHandler
+ :members:
diff --git a/testing/mozbase/docs/mozprofile.rst b/testing/mozbase/docs/mozprofile.rst
new file mode 100644
index 000000000..85428e835
--- /dev/null
+++ b/testing/mozbase/docs/mozprofile.rst
@@ -0,0 +1,99 @@
+:mod:`mozprofile` --- Create and modify Mozilla application profiles
+====================================================================
+
+Mozprofile_ is a python tool for creating and managing profiles for Mozilla's
+applications (Firefox, Thunderbird, etc.). In addition to creating profiles,
+mozprofile can install addons_ and set preferences_. Mozprofile can be used
+from the command line or as an API.
+
+The preferred way of setting up profile data (addons, permissions, preferences,
+etc.) is by passing them to the profile_ constructor.
+
+Addons
+------
+
+.. automodule:: mozprofile.addons
+ :members:
+
+Addons may be installed individually or from a manifest.
+
+Example::
+
+ from mozprofile import FirefoxProfile
+
+ # create new profile to pass to mozmill/mozrunner
+ profile = FirefoxProfile(addons=["adblock.xpi"])
+
+Command Line Interface
+----------------------
+
+.. automodule:: mozprofile.cli
+ :members:
+
+The profile to be operated on may be specified with the ``--profile``
+switch. If a profile is not specified, one will be created in a
+temporary directory which will be echoed to the terminal::
+
+ (mozmill)> mozprofile
+ /tmp/tmp4q1iEU.mozrunner
+ (mozmill)> ls /tmp/tmp4q1iEU.mozrunner
+ user.js
+
+To run mozprofile from the command line, enter
+``mozprofile --help`` for a list of options.
+
+Permissions
+-----------
+
+.. automodule:: mozprofile.permissions
+ :members:
+
+You can set permissions by creating a ``ServerLocations`` object that you pass
+to the ``Profile`` constructor. Hosts can be added to it with
+``add_host(host, port)``. ``port`` can be 0.
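+
+A minimal sketch (it assumes ``ServerLocations`` is importable from
+``mozprofile.permissions`` and is passed via the constructor's ``locations``
+keyword; see the generated API documentation above for the exact signatures):
+
+.. code-block:: python
+
+    from mozprofile import FirefoxProfile
+    from mozprofile.permissions import ServerLocations
+
+    locations = ServerLocations()
+    locations.add_host('mochi.test', 8888)  # host and port, as described above
+    profile = FirefoxProfile(locations=locations)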
+
+Preferences
+-----------
+
+.. automodule:: mozprofile.prefs
+ :members:
+
+Preferences can be set in several ways:
+
+- using the API: You can make a dictionary with the preferences and pass it to
+ the ``Profile`` constructor. You can also add more preferences with the
+ ``Profile.set_preferences`` method.
+- using a JSON blob file: ``mozprofile --preferences myprefs.json``
+- using a ``.ini`` file: ``mozprofile --preferences myprefs.ini``
+- via the command line: ``mozprofile --pref key:value --pref key:value [...]``
+
+When setting preferences from an ``.ini`` file or the ``--pref`` switch,
+the value will be interpreted as an integer or a boolean
+(``true``/``false``) if possible.
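+
+A minimal sketch of the API approach (it assumes the constructor's
+``preferences`` keyword; the preference names are purely illustrative):
+
+.. code-block:: python
+
+    from mozprofile import FirefoxProfile
+
+    # preferences passed at construction time
+    profile = FirefoxProfile(preferences={'browser.startup.homepage': 'about:blank'})
+
+    # additional preferences can be added later
+    profile.set_preferences({'browser.shell.checkDefaultBrowser': False})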
+
+Profile
+--------------------
+
+.. automodule:: mozprofile.profile
+ :members:
+
+Resources
+-----------
+Other Mozilla programs offer additional and overlapping functionality
+for profiles. There is also substantive documentation on profiles and
+their management.
+
+- ProfileManager_: XULRunner application for managing profiles. Has a GUI and CLI.
+- python-profilemanager_: python CLI interface similar to ProfileManager
+- profile documentation_
+
+
+.. _Mozprofile: https://github.com/mozilla/mozbase/tree/master/mozprofile
+.. _addons: https://developer.mozilla.org/en/addons
+.. _preferences: https://developer.mozilla.org/En/A_Brief_Guide_to_Mozilla_Preferences
+.. _mozprofile.profile: https://github.com/mozilla/mozbase/tree/master/mozprofile/mozprofile/profile.py
+.. _AddonManager: https://github.com/mozilla/mozbase/tree/master/mozprofile/mozprofile/addons.py
+.. _here: https://github.com/mozilla/mozbase/blob/master/mozprofile/mozprofile/permissions.py
+.. _ProfileManager: https://developer.mozilla.org/en/Profile_Manager
+.. _python-profilemanager: http://k0s.org/mozilla/hg/profilemanager/
+.. _documentation: http://support.mozilla.com/en-US/kb/Profiles
diff --git a/testing/mozbase/docs/mozrunner.rst b/testing/mozbase/docs/mozrunner.rst
new file mode 100644
index 000000000..766b02cc4
--- /dev/null
+++ b/testing/mozbase/docs/mozrunner.rst
@@ -0,0 +1,177 @@
+:mod:`mozrunner` --- Manage remote and local gecko processes
+============================================================
+
+Mozrunner provides an API to manage a gecko-based application with an
+arbitrary configuration profile. It currently supports local desktop
+binaries such as Firefox and Thunderbird, as well as Firefox OS on
+mobile devices and emulators.
+
+
+Basic usage
+-----------
+
+The simplest way to use mozrunner is to instantiate a runner, start it,
+and then wait for it to finish:
+
+.. code-block:: python
+
+ from mozrunner import FirefoxRunner
+ binary = 'path/to/firefox/binary'
+ runner = FirefoxRunner(binary=binary)
+ runner.start()
+ runner.wait()
+
+This automatically creates and uses a default mozprofile object. If you
+wish to use a specialized or pre-existing profile, you can create a
+:doc:`mozprofile <mozprofile>` object and pass it in:
+
+.. code-block:: python
+
+ from mozprofile import FirefoxProfile
+ from mozrunner import FirefoxRunner
+ import os
+
+ binary = 'path/to/firefox/binary'
+ profile_path = 'path/to/profile'
+ if os.path.exists(profile_path):
+ profile = FirefoxProfile.clone(path_from=profile_path)
+ else:
+ profile = FirefoxProfile(profile=profile_path)
+ runner = FirefoxRunner(binary=binary, profile=profile)
+ runner.start()
+ runner.wait()
+
+
+Handling output
+---------------
+
+By default, mozrunner dumps the output of the gecko process to standard output.
+It is possible to add arbitrary output handlers by passing them in via the
+`process_args` argument. Be careful: passing in a handler overrides the default
+behaviour, so if you want to use a handler in addition to dumping to stdout, you
+need to specify that explicitly. For example:
+
+.. code-block:: python
+
+ from mozrunner import FirefoxRunner
+ import sys
+
+ def handle_output_line(line):
+ do_something(line)
+
+ binary = 'path/to/firefox/binary'
+ process_args = { 'stream': sys.stdout,
+ 'processOutputLine': [handle_output_line] }
+ runner = FirefoxRunner(binary=binary, process_args=process_args)
+
+Mozrunner uses :doc:`mozprocess <mozprocess>` to manage the underlying gecko
+process and handle output. See the :doc:`mozprocess documentation <mozprocess>`
+for all available arguments accepted by `process_args`.
+
+
+Handling timeouts
+-----------------
+
+Sometimes gecko can hang, or maybe it is just taking too long. To handle this case you
+may want to set a timeout. Mozrunner has two kinds of timeouts, the
+traditional `timeout`, and the `outputTimeout`. These get passed into the
+`runner.start()` method. Setting `timeout` will cause gecko to be killed after
+the specified number of seconds, no matter what. Setting `outputTimeout` will cause
+gecko to be killed after the specified number of seconds with no output. In both
+cases the process handler's `onTimeout` callbacks will be triggered.
+
+.. code-block:: python
+
+ from mozrunner import FirefoxRunner
+
+ def on_timeout():
+ print('timed out after 10 seconds with no output!')
+
+ binary = 'path/to/firefox/binary'
+ process_args = { 'onTimeout': on_timeout }
+ runner = FirefoxRunner(binary=binary, process_args=process_args)
+ runner.start(outputTimeout=10)
+ runner.wait()
+
+The `runner.wait()` method also accepts a timeout argument. Unlike the arguments
+to `runner.start()`, reaching this timeout simply causes the wait call to return;
+it does not kill the gecko process.
+
+.. code-block:: python
+
+ runner.start(timeout=100)
+
+ waiting = 0
+ while runner.wait(timeout=1) is None:
+ waiting += 1
+ print("Been waiting for %d seconds so far.." % waiting)
+ assert waiting <= 100
+
+
+Using a device runner
+---------------------
+
+The previous examples used a GeckoRuntimeRunner. If you want to control a
+gecko process on a remote device, you need to use a DeviceRunner. The API is
+nearly identical, except that you don't pass in a binary; instead, you create a
+device object. For example, for B2G (Firefox OS) emulators you might do:
+
+.. code-block:: python
+
+ from mozrunner import B2GEmulatorRunner
+
+ b2g_home = 'path/to/B2G'
+ runner = B2GEmulatorRunner(arch='arm', b2g_home=b2g_home)
+ runner.start()
+ runner.wait()
+
+Device runners have a `device` object. Remember that the gecko process runs on
+the device. In the case of the emulator, it is possible to start the
+device independently of the gecko process.
+
+.. code-block:: python
+
+ runner.device.start() # launches the emulator (which also launches gecko)
+ runner.start() # stops the gecko process, installs the profile, restarts the gecko process
+
+
+Runner API Documentation
+------------------------
+
+Application Runners
+~~~~~~~~~~~~~~~~~~~
+.. automodule:: mozrunner.runners
+ :members:
+
+BaseRunner
+~~~~~~~~~~
+.. autoclass:: mozrunner.base.BaseRunner
+ :members:
+
+GeckoRuntimeRunner
+~~~~~~~~~~~~~~~~~~
+.. autoclass:: mozrunner.base.GeckoRuntimeRunner
+ :show-inheritance:
+ :members:
+
+DeviceRunner
+~~~~~~~~~~~~
+.. autoclass:: mozrunner.base.DeviceRunner
+ :show-inheritance:
+ :members:
+
+Device API Documentation
+------------------------
+
+Generally using the device classes directly shouldn't be required, but in some
+cases it may be desirable.
+
+Device
+~~~~~~
+.. autoclass:: mozrunner.devices.Device
+ :members:
+
+Emulator
+~~~~~~~~
+.. autoclass:: mozrunner.devices.Emulator
+ :show-inheritance:
+ :members:
diff --git a/testing/mozbase/docs/mozversion.rst b/testing/mozbase/docs/mozversion.rst
new file mode 100644
index 000000000..21b028d17
--- /dev/null
+++ b/testing/mozbase/docs/mozversion.rst
@@ -0,0 +1,112 @@
+:mod:`mozversion` --- Get application information
+=================================================
+
+`mozversion <https://github.com/mozilla/mozbase/tree/master/mozversion>`_
+provides version information such as the application name and the changesets
+that it has been built from. This is commonly used in reporting or for
+conditional logic based on the application under test.
+
+Note that mozversion can report the version of remote devices (e.g. Firefox OS),
+but it requires the :mod:`mozdevice` dependency in that case. You can install it
+along with mozversion by using the *device* extra:
+
+.. code-block:: bash
+
+ pip install mozversion[device]
+
+
+API Usage
+---------
+
+.. automodule:: mozversion
+ :members: get_version
+
+Examples
+````````
+
+Firefox::
+
+ import mozversion
+
+ version = mozversion.get_version(binary='/path/to/firefox-bin')
+ for (key, value) in sorted(version.items()):
+ if value:
+ print '%s: %s' % (key, value)
+
+Firefox for Android::
+
+ version = mozversion.get_version(binary='path/to/firefox.apk')
+ print version['application_changeset'] # gets hg revision of build
+
+FirefoxOS::
+
+ version = mozversion.get_version(sources='path/to/sources.xml', dm_type='adb')
+ print version['gaia_changeset'] # gets gaia git revision
+
+Command Line Usage
+------------------
+
+mozversion comes with a command line program, ``mozversion``, which may be used
+to get version information from an application.
+
+Usage::
+
+ mozversion [options]
+
+Options
+```````
+
+---binary
+'''''''''
+
+This is the path to the target application binary or .apk. If this is omitted
+then the current directory is checked for the existence of an
+application.ini file. If not found, then it is assumed the target
+application is a remote Firefox OS instance.
+
+
+---sources
+''''''''''
+
+The path to the sources.xml that accompanies the target application (Firefox OS
+only). If this is omitted then the current directory is checked for the
+existence of a sources.xml file.
+
+Examples
+````````
+
+Firefox::
+
+ $ mozversion --binary=/path/to/firefox-bin
+ application_buildid: 20131205075310
+ application_changeset: 39faf812aaec
+ application_name: Firefox
+ application_repository: http://hg.mozilla.org/releases/mozilla-release
+ application_version: 26.0
+ platform_buildid: 20131205075310
+ platform_changeset: 39faf812aaec
+ platform_repository: http://hg.mozilla.org/releases/mozilla-release
+
+Firefox for Android::
+
+ $ mozversion --binary=/path/to/firefox.apk
+
+Firefox OS::
+
+ $ mozversion --sources=/path/to/sources.xml
+ application_buildid: 20140106040201
+ application_changeset: 14ac61461f2a
+ application_name: B2G
+ application_repository: http://hg.mozilla.org/mozilla-central
+ application_version: 29.0a1
+ build_changeset: 59605a7c026ff06cc1613af3938579b1dddc6cfe
+ device_firmware_date: 1380051975
+ device_firmware_version_incremental: 139
+ device_firmware_version_release: 4.0.4
+ device_id: msm7627a
+ gaia_changeset: 9a222ac02db176e47299bb37112ae40aeadbeca7
+ gaia_date: 1389005812
+ gecko_changeset: 3a2d8af198510726b063a217438fcf2591f4dfcf
+ platform_buildid: 20140106040201
+ platform_changeset: 14ac61461f2a
+ platform_repository: http://hg.mozilla.org/mozilla-central
diff --git a/testing/mozbase/docs/requirements.txt b/testing/mozbase/docs/requirements.txt
new file mode 100644
index 000000000..53dd4ca67
--- /dev/null
+++ b/testing/mozbase/docs/requirements.txt
@@ -0,0 +1 @@
+marionette_client
diff --git a/testing/mozbase/docs/setuprunning.rst b/testing/mozbase/docs/setuprunning.rst
new file mode 100644
index 000000000..c1e781bc0
--- /dev/null
+++ b/testing/mozbase/docs/setuprunning.rst
@@ -0,0 +1,18 @@
+Set up and running
+------------------
+
+Activities under this domain include installing the software, creating
+a profile (a set of configuration settings), running a program in a
+controlled environment such that it can be shut down safely, and
+correctly handling the case where the system crashes.
+
+.. toctree::
+ :maxdepth: 2
+
+ mozfile
+ mozinstall
+ mozprofile
+ mozprocess
+ mozrunner
+ mozcrash
+ mozdebug
diff --git a/testing/mozbase/manifestparser/manifestparser/__init__.py b/testing/mozbase/manifestparser/manifestparser/__init__.py
new file mode 100644
index 000000000..43c58ae79
--- /dev/null
+++ b/testing/mozbase/manifestparser/manifestparser/__init__.py
@@ -0,0 +1,8 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .manifestparser import *
+from .expression import *
+from .ini import *
diff --git a/testing/mozbase/manifestparser/manifestparser/cli.py b/testing/mozbase/manifestparser/manifestparser/cli.py
new file mode 100644
index 000000000..482575d29
--- /dev/null
+++ b/testing/mozbase/manifestparser/manifestparser/cli.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Mozilla universal manifest parser
+"""
+
+from optparse import OptionParser
+import os
+import sys
+
+from .manifestparser import (
+ convert,
+ ManifestParser,
+)
+
+
+class ParserError(Exception):
+ """error for exceptions while parsing the command line"""
+
+
+def parse_args(_args):
+ """
+ parse and return:
+ --keys=value (or --key value)
+ -tags
+ args
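+
+    e.g. (a sketch of the returned 3-tuple)::
+
+        parse_args(['--os=linux', '-flaky', 'manifest.ini'])
+        # -> ({'os': 'linux'}, ['flaky'], ['manifest.ini'])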
+ """
+
+ # return values
+ _dict = {}
+ tags = []
+ args = []
+
+ # parse the arguments
+ key = None
+ for arg in _args:
+ if arg.startswith('---'):
+ raise ParserError("arguments should start with '-' or '--' only")
+ elif arg.startswith('--'):
+ if key:
+ raise ParserError("Key %s still open" % key)
+ key = arg[2:]
+ if '=' in key:
+ key, value = key.split('=', 1)
+ _dict[key] = value
+ key = None
+ continue
+ elif arg.startswith('-'):
+ if key:
+ raise ParserError("Key %s still open" % key)
+ tags.append(arg[1:])
+ continue
+ else:
+ if key:
+ _dict[key] = arg
+ continue
+ args.append(arg)
+
+ # return values
+ return (_dict, tags, args)
+
+
+class CLICommand(object):
+ usage = '%prog [options] command'
+
+ def __init__(self, parser):
+ self._parser = parser # master parser
+
+ def parser(self):
+ return OptionParser(usage=self.usage, description=self.__doc__,
+ add_help_option=False)
+
+
+class Copy(CLICommand):
+ usage = '%prog [options] copy manifest directory -tag1 -tag2 --key1=value1 --key2=value2 ...'
+
+ def __call__(self, options, args):
+ # parse the arguments
+ try:
+ kwargs, tags, args = parse_args(args)
+ except ParserError, e:
+ self._parser.error(e.message)
+
+ # make sure we have some manifests, otherwise it will
+ # be quite boring
+ if not len(args) == 2:
+ HelpCLI(self._parser)(options, ['copy'])
+ return
+
+ # read the manifests
+ # TODO: should probably ensure these exist here
+ manifests = ManifestParser()
+ manifests.read(args[0])
+
+ # print the resultant query
+ manifests.copy(args[1], None, *tags, **kwargs)
+
+
+class CreateCLI(CLICommand):
+ """
+ create a manifest from a list of directories
+ """
+ usage = '%prog [options] create directory <directory> <...>'
+
+ def parser(self):
+ parser = CLICommand.parser(self)
+ parser.add_option('-p', '--pattern', dest='pattern',
+ help="glob pattern for files")
+ parser.add_option('-i', '--ignore', dest='ignore',
+ default=[], action='append',
+ help='directories to ignore')
+ parser.add_option('-w', '--in-place', dest='in_place',
+ help='Write .ini files in place; filename to write to')
+ return parser
+
+ def __call__(self, _options, args):
+ parser = self.parser()
+ options, args = parser.parse_args(args)
+
+ # need some directories
+ if not len(args):
+ parser.print_usage()
+ return
+
+ # add the directories to the manifest
+ for arg in args:
+ assert os.path.exists(arg)
+ assert os.path.isdir(arg)
+ manifest = convert(args, pattern=options.pattern, ignore=options.ignore,
+ write=options.in_place)
+ if manifest:
+ print manifest
+
+
+class WriteCLI(CLICommand):
+ """
+ write a manifest based on a query
+ """
+ usage = '%prog [options] write manifest <manifest> -tag1 -tag2 --key1=value1 --key2=value2 ...'
+
+ def __call__(self, options, args):
+
+ # parse the arguments
+ try:
+ kwargs, tags, args = parse_args(args)
+ except ParserError, e:
+ self._parser.error(e.message)
+
+ # make sure we have some manifests, otherwise it will
+ # be quite boring
+ if not args:
+ HelpCLI(self._parser)(options, ['write'])
+ return
+
+ # read the manifests
+ # TODO: should probably ensure these exist here
+ manifests = ManifestParser()
+ manifests.read(*args)
+
+ # print the resultant query
+ manifests.write(global_tags=tags, global_kwargs=kwargs)
+
+
+class HelpCLI(CLICommand):
+ """
+ get help on a command
+ """
+ usage = '%prog [options] help [command]'
+
+ def __call__(self, options, args):
+ if len(args) == 1 and args[0] in commands:
+ commands[args[0]](self._parser).parser().print_help()
+ else:
+ self._parser.print_help()
+ print '\nCommands:'
+ for command in sorted(commands):
+ print ' %s : %s' % (command, commands[command].__doc__.strip())
+
+
+class UpdateCLI(CLICommand):
+ """
+ update the tests as listed in a manifest from a directory
+ """
+ usage = '%prog [options] update manifest directory -tag1 -tag2 --key1=value1 --key2=value2 ...'
+
+ def __call__(self, options, args):
+ # parse the arguments
+ try:
+ kwargs, tags, args = parse_args(args)
+ except ParserError, e:
+ self._parser.error(e.message)
+
+ # make sure we have some manifests, otherwise it will
+ # be quite boring
+ if not len(args) == 2:
+ HelpCLI(self._parser)(options, ['update'])
+ return
+
+ # read the manifests
+ # TODO: should probably ensure these exist here
+ manifests = ManifestParser()
+ manifests.read(args[0])
+
+ # print the resultant query
+ manifests.update(args[1], None, *tags, **kwargs)
+
+
+# command -> class mapping
+commands = {'create': CreateCLI,
+ 'help': HelpCLI,
+ 'update': UpdateCLI,
+ 'write': WriteCLI}
+
+
+def main(args=sys.argv[1:]):
+ """console_script entry point"""
+
+ # set up an option parser
+ usage = '%prog [options] [command] ...'
+ description = "%s. Use `help` to display commands" % __doc__.strip()
+ parser = OptionParser(usage=usage, description=description)
+ parser.add_option('-s', '--strict', dest='strict',
+ action='store_true', default=False,
+ help='adhere strictly to errors')
+ parser.disable_interspersed_args()
+
+ options, args = parser.parse_args(args)
+
+ if not args:
+ HelpCLI(parser)(options, args)
+ parser.exit()
+
+ # get the command
+ command = args[0]
+ if command not in commands:
+ parser.error("Command must be one of %s (you gave '%s')" %
+ (', '.join(sorted(commands.keys())), command))
+
+ handler = commands[command](parser)
+ handler(options, args[1:])
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/mozbase/manifestparser/manifestparser/expression.py b/testing/mozbase/manifestparser/manifestparser/expression.py
new file mode 100644
index 000000000..6b705ead9
--- /dev/null
+++ b/testing/mozbase/manifestparser/manifestparser/expression.py
@@ -0,0 +1,324 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+import sys
+import traceback
+
+__all__ = ['parse', 'ParseError', 'ExpressionParser']
+
+# expr.py
+# from:
+# http://k0s.org/mozilla/hg/expressionparser
+# http://hg.mozilla.org/users/tmielczarek_mozilla.com/expressionparser
+
+# Implements a top-down parser/evaluator for simple boolean expressions.
+# ideas taken from http://effbot.org/zone/simple-top-down-parsing.htm
+#
+# Rough grammar:
+# expr := literal
+# | '(' expr ')'
+# | expr '&&' expr
+# | expr '||' expr
+# | expr '==' expr
+# | expr '!=' expr
+# | expr '<' expr
+# | expr '>' expr
+# | expr '<=' expr
+# | expr '>=' expr
+# literal := BOOL
+# | INT
+# | STRING
+# | IDENT
+# BOOL := true|false
+# INT := [0-9]+
+# STRING := "[^"]*"
+# IDENT := [A-Za-z_]\w*
+
+# Identifiers take their values from a mapping dictionary passed as the second
+# argument.
+
+# Glossary (see above URL for details):
+# - nud: null denotation
+# - led: left denotation
+# - lbp: left binding power
+# - rbp: right binding power
+
+
+class ident_token(object):
+
+ def __init__(self, scanner, value):
+ self.value = value
+
+ def nud(self, parser):
+ # identifiers take their value from the value mappings passed
+ # to the parser
+ return parser.value(self.value)
+
+
+class literal_token(object):
+
+ def __init__(self, scanner, value):
+ self.value = value
+
+ def nud(self, parser):
+ return self.value
+
+
+class eq_op_token(object):
+ "=="
+
+ def led(self, parser, left):
+ return left == parser.expression(self.lbp)
+
+
+class neq_op_token(object):
+ "!="
+
+ def led(self, parser, left):
+ return left != parser.expression(self.lbp)
+
+
+class lt_op_token(object):
+ "<"
+
+ def led(self, parser, left):
+ return left < parser.expression(self.lbp)
+
+
+class gt_op_token(object):
+ ">"
+
+ def led(self, parser, left):
+ return left > parser.expression(self.lbp)
+
+
+class le_op_token(object):
+ "<="
+
+ def led(self, parser, left):
+ return left <= parser.expression(self.lbp)
+
+
+class ge_op_token(object):
+ ">="
+
+ def led(self, parser, left):
+ return left >= parser.expression(self.lbp)
+
+
+class not_op_token(object):
+ "!"
+
+ def nud(self, parser):
+ return not parser.expression(100)
+
+
+class and_op_token(object):
+ "&&"
+
+ def led(self, parser, left):
+ right = parser.expression(self.lbp)
+ return left and right
+
+
+class or_op_token(object):
+ "||"
+
+ def led(self, parser, left):
+ right = parser.expression(self.lbp)
+ return left or right
+
+
+class lparen_token(object):
+ "("
+
+ def nud(self, parser):
+ expr = parser.expression()
+ parser.advance(rparen_token)
+ return expr
+
+
+class rparen_token(object):
+ ")"
+
+
+class end_token(object):
+ """always ends parsing"""
+
+# derived literal tokens
+
+
+class bool_token(literal_token):
+
+ def __init__(self, scanner, value):
+ value = {'true': True, 'false': False}[value]
+ literal_token.__init__(self, scanner, value)
+
+
+class int_token(literal_token):
+
+ def __init__(self, scanner, value):
+ literal_token.__init__(self, scanner, int(value))
+
+
+class string_token(literal_token):
+
+ def __init__(self, scanner, value):
+ literal_token.__init__(self, scanner, value[1:-1])
+
+precedence = [(end_token, rparen_token),
+ (or_op_token,),
+ (and_op_token,),
+ (lt_op_token, gt_op_token, le_op_token, ge_op_token,
+ eq_op_token, neq_op_token),
+ (lparen_token,),
+ ]
+for index, rank in enumerate(precedence):
+ for token in rank:
+ token.lbp = index # lbp = lowest left binding power
+
+
+class ParseError(Exception):
+ """error parsing conditional expression"""
+
+
+class ExpressionParser(object):
+ """
+ A parser for a simple expression language.
+
+ The expression language can be described as follows::
+
+ EXPRESSION ::= LITERAL | '(' EXPRESSION ')' | '!' EXPRESSION | EXPRESSION OP EXPRESSION
+ OP ::= '==' | '!=' | '<' | '>' | '<=' | '>=' | '&&' | '||'
+ LITERAL ::= BOOL | INT | IDENT | STRING
+ BOOL ::= 'true' | 'false'
+ INT ::= [0-9]+
+ IDENT ::= [a-zA-Z_]\w*
+ STRING ::= '"' [^\"] '"' | ''' [^\'] '''
+
+ At its core, expressions consist of booleans, integers, identifiers and
+ strings. Booleans are one of *true* or *false*. Integers are a series
+ of digits. Identifiers are a series of English letters and underscores.
+ Strings are a pair of matching quote characters (single or double) with
+ zero or more characters inside.
+
+ Expressions can be combined with operators: the equals (==) and not
+ equals (!=) operators compare two expressions and produce a boolean. The
+ and (&&) and or (||) operators take two expressions and produce the logical
+ AND or OR value of them, respectively. An expression can also be prefixed
+ with the not (!) operator, which produces its logical negation.
+
+ Finally, any expression may be contained within parentheses for grouping.
+
+ Identifiers take their values from the mapping provided.
+ """
+
+ scanner = None
+
+ def __init__(self, text, valuemapping, strict=False):
+ """
+ Initialize the parser
+ :param text: The expression to parse as a string.
+ :param valuemapping: A dict mapping identifier names to values.
+ :param strict: If true, referencing an identifier that was not
+ provided in :valuemapping: will raise an error.
+ """
+ self.text = text
+ self.valuemapping = valuemapping
+ self.strict = strict
+
+ def _tokenize(self):
+ """
+ Lex the input text into tokens and yield them in sequence.
+ """
+ if not ExpressionParser.scanner:
+ ExpressionParser.scanner = re.Scanner([
+ # Note: keep these in sync with the class docstring above.
+ (r"true|false", bool_token),
+ (r"[a-zA-Z_]\w*", ident_token),
+ (r"[0-9]+", int_token),
+ (r'("[^"]*")|(\'[^\']*\')', string_token),
+ (r"==", eq_op_token()),
+ (r"!=", neq_op_token()),
+ (r"<=", le_op_token()),
+ (r">=", ge_op_token()),
+ (r"<", lt_op_token()),
+ (r">", gt_op_token()),
+ (r"\|\|", or_op_token()),
+ (r"!", not_op_token()),
+ (r"&&", and_op_token()),
+ (r"\(", lparen_token()),
+ (r"\)", rparen_token()),
+ (r"\s+", None), # skip whitespace
+ ])
+ tokens, remainder = ExpressionParser.scanner.scan(self.text)
+ for t in tokens:
+ yield t
+ yield end_token()
+
+ def value(self, ident):
+ """
+ Look up the value of |ident| in the value mapping passed in the
+ constructor.
+ """
+ if self.strict:
+ return self.valuemapping[ident]
+ else:
+ return self.valuemapping.get(ident, None)
+
+ def advance(self, expected):
+ """
+ Assert that the next token is an instance of |expected|, and advance
+ to the next token.
+ """
+ if not isinstance(self.token, expected):
+ raise Exception("Unexpected token!")
+ self.token = self.iter.next()
+
+ def expression(self, rbp=0):
+ """
+ Parse and return the value of an expression until a token with
+ right binding power greater than rbp is encountered.
+ """
+ t = self.token
+ self.token = self.iter.next()
+ left = t.nud(self)
+ while rbp < self.token.lbp:
+ t = self.token
+ self.token = self.iter.next()
+ left = t.led(self, left)
+ return left
+
+ def parse(self):
+ """
+ Parse and return the value of the expression in the text
+ passed to the constructor. Raises a ParseError if the expression
+ could not be parsed.
+ """
+ try:
+ self.iter = self._tokenize()
+ self.token = self.iter.next()
+ return self.expression()
+ except:
+ extype, ex, tb = sys.exc_info()
+ formatted = ''.join(traceback.format_exception_only(extype, ex))
+ raise ParseError("could not parse: "
+ "%s\nexception: %svariables: %s" % (self.text,
+ formatted,
+ self.valuemapping)), None, tb
+
+ __call__ = parse
+
+
+def parse(text, **values):
+ """
+ Parse and evaluate a boolean expression.
+ :param text: The expression to parse, as a string.
+ :param values: A dict containing a name to value mapping for identifiers
+ referenced in *text*.
+ :rtype: the final value of the expression.
+ :raises: :py:exc:`ParseError` will be raised if parsing fails.
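+
+    For example (identifier values here are purely illustrative)::
+
+        parse('os == "win" && debug', os='linux', debug=True)  # -> False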
+ """
+ return ExpressionParser(text, values).parse()
diff --git a/testing/mozbase/manifestparser/manifestparser/filters.py b/testing/mozbase/manifestparser/manifestparser/filters.py
new file mode 100644
index 000000000..e832c0da6
--- /dev/null
+++ b/testing/mozbase/manifestparser/manifestparser/filters.py
@@ -0,0 +1,421 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+A filter is a callable that accepts an iterable of test objects and a
+dictionary of values, and returns a new iterable of test objects. It is
+possible to define custom filters if the built-in ones are not enough.
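+
+For example, a custom filter that drops tests carrying a (hypothetical)
+``slow`` key could be written as::
+
+    def exclude_slow(tests, values):
+        for test in tests:
+            if 'slow' not in test:
+                yield test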
+"""
+
+from collections import defaultdict, MutableSequence
+import itertools
+import os
+
+from .expression import (
+ parse,
+ ParseError,
+)
+
+
+# built-in filters
+
+def skip_if(tests, values):
+ """
+ Sets disabled on all tests containing the `skip-if` tag and whose condition
+ is True. This filter is added by default.
+ """
+ tag = 'skip-if'
+ for test in tests:
+ if tag in test and parse(test[tag], **values):
+ test.setdefault('disabled', '{}: {}'.format(tag, test[tag]))
+ yield test
+
+
+def run_if(tests, values):
+ """
+ Sets disabled on all tests containing the `run-if` tag and whose condition
+ is False. This filter is added by default.
+ """
+ tag = 'run-if'
+ for test in tests:
+ if tag in test and not parse(test[tag], **values):
+ test.setdefault('disabled', '{}: {}'.format(tag, test[tag]))
+ yield test
+
+
+def fail_if(tests, values):
+ """
+ Sets expected to 'fail' on all tests containing the `fail-if` tag and whose
+ condition is True. This filter is added by default.
+ """
+ tag = 'fail-if'
+ for test in tests:
+ if tag in test and parse(test[tag], **values):
+ test['expected'] = 'fail'
+ yield test
+
+
+def enabled(tests, values):
+ """
+ Removes all tests containing the `disabled` key. This filter can be
+ added by passing `disabled=False` into `active_tests`.
+ """
+ for test in tests:
+ if 'disabled' not in test:
+ yield test
+
+
+def exists(tests, values):
+ """
+ Removes all tests that do not exist on the file system. This filter is
+ added by default, but can be removed by passing `exists=False` into
+ `active_tests`.
+ """
+ for test in tests:
+ if os.path.exists(test['path']):
+ yield test
+
+
+# built-in instance filters
+
+class InstanceFilter(object):
+ """
+ Generally only one instance of a class filter should be applied at a time.
+ Two instances of `InstanceFilter` are considered equal if they have the
+ same class name. This ensures only a single instance is ever added to
+ `filterlist`. This class also formats filters' __str__ method for easier
+ debugging.
+ """
+ unique = True
+
+ def __init__(self, *args, **kwargs):
+ self.fmt_args = ', '.join(itertools.chain(
+ [str(a) for a in args],
+ ['{}={}'.format(k, v) for k, v in kwargs.iteritems()]))
+
+ def __eq__(self, other):
+ if self.unique:
+ return self.__class__ == other.__class__
+ return self.__hash__() == other.__hash__()
+
+ def __str__(self):
+ return "{}({})".format(self.__class__.__name__, self.fmt_args)
+
+
+class subsuite(InstanceFilter):
+ """
+ If `name` is None, removes all tests that have a `subsuite` key.
+ Otherwise removes all tests that do not have a subsuite matching `name`.
+
+ It is possible to specify conditional subsuite keys using:
+ subsuite = foo,condition
+
+ where 'foo' is the subsuite name, and 'condition' is the same type of
+ condition used for skip-if. If the condition doesn't evaluate to true,
+ the subsuite designation will be removed from the test.
+
+ :param name: The name of the subsuite to run (default None)
+ """
+
+ def __init__(self, name=None):
+ InstanceFilter.__init__(self, name=name)
+ self.name = name
+
+ def __call__(self, tests, values):
+ # Look for conditional subsuites, and replace them with the subsuite
+ # itself (if the condition is true), or nothing.
+ for test in tests:
+ subsuite = test.get('subsuite', '')
+ if ',' in subsuite:
+ try:
+ subsuite, cond = subsuite.split(',')
+ except ValueError:
+ raise ParseError("subsuite condition can't contain commas")
+ matched = parse(cond, **values)
+ if matched:
+ test['subsuite'] = subsuite
+ else:
+ test['subsuite'] = ''
+
+ # Filter on current subsuite
+ if self.name is None:
+ if not test.get('subsuite'):
+ yield test
+ else:
+ if test.get('subsuite', '') == self.name:
+ yield test
+
+
+class chunk_by_slice(InstanceFilter):
+ """
+ Basic chunking algorithm that splits tests evenly across total chunks.
+
+ :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks
+ :param total_chunks: the total number of chunks
+ :param disabled: Whether to include disabled tests in the chunking
+ algorithm. If False, each chunk contains an equal number
+ of non-disabled tests. If True, each chunk contains an
+ equal number of tests (default False)
+ """
+
+ def __init__(self, this_chunk, total_chunks, disabled=False):
+ assert 1 <= this_chunk <= total_chunks
+ InstanceFilter.__init__(self, this_chunk, total_chunks,
+ disabled=disabled)
+ self.this_chunk = this_chunk
+ self.total_chunks = total_chunks
+ self.disabled = disabled
+
+ def __call__(self, tests, values):
+ tests = list(tests)
+ if self.disabled:
+ chunk_tests = tests[:]
+ else:
+ chunk_tests = [t for t in tests if 'disabled' not in t]
+
+ tests_per_chunk = float(len(chunk_tests)) / self.total_chunks
+ start = int(round((self.this_chunk - 1) * tests_per_chunk))
+ end = int(round(self.this_chunk * tests_per_chunk))
+
+ if not self.disabled:
+ # map start and end back onto original list of tests. Disabled
+ # tests will still be included in the returned list, but each
+ # chunk will contain an equal number of enabled tests.
+ if self.this_chunk == 1:
+ start = 0
+ elif start < len(chunk_tests):
+ start = tests.index(chunk_tests[start])
+
+ if self.this_chunk == self.total_chunks:
+ end = len(tests)
+ elif end < len(chunk_tests):
+ end = tests.index(chunk_tests[end])
+ return (t for t in tests[start:end])
+
+
+class chunk_by_dir(InstanceFilter):
+ """
+ Basic chunking algorithm that splits directories of tests evenly at a
+ given depth.
+
+ For example, a depth of 2 means all test directories two path nodes away
+ from the base are gathered, then split evenly across the total number of
+ chunks. The number of tests in each of the directories is not taken into
+ account (so chunks will not contain an even number of tests). All test
+ paths must be relative to the same root (typically the root of the source
+ repository).
+
+ :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks
+ :param total_chunks: the total number of chunks
+ :param depth: the minimum depth of a subdirectory before it will be
+ considered unique
+ """
+
+ def __init__(self, this_chunk, total_chunks, depth):
+ InstanceFilter.__init__(self, this_chunk, total_chunks, depth)
+ self.this_chunk = this_chunk
+ self.total_chunks = total_chunks
+ self.depth = depth
+
+ def __call__(self, tests, values):
+ tests_by_dir = defaultdict(list)
+ ordered_dirs = []
+ for test in tests:
+ path = test['relpath']
+
+ if path.startswith(os.sep):
+ path = path[1:]
+
+ dirs = path.split(os.sep)
+ dirs = dirs[:min(self.depth, len(dirs) - 1)]
+ path = os.sep.join(dirs)
+
+ # don't count directories that only have disabled tests in them,
+ # but still yield disabled tests that are alongside enabled tests
+ if path not in ordered_dirs and 'disabled' not in test:
+ ordered_dirs.append(path)
+ tests_by_dir[path].append(test)
+
+ tests_per_chunk = float(len(ordered_dirs)) / self.total_chunks
+ start = int(round((self.this_chunk - 1) * tests_per_chunk))
+ end = int(round(self.this_chunk * tests_per_chunk))
+
+ for i in range(start, end):
+ for test in tests_by_dir.pop(ordered_dirs[i]):
+ yield test
+
+ # find directories that only contain disabled tests. They still need to
+ # be yielded for reporting purposes. Put them all in chunk 1 for
+ # simplicity.
+ if self.this_chunk == 1:
+ disabled_dirs = [v for k, v in tests_by_dir.iteritems()
+ if k not in ordered_dirs]
+ for disabled_test in itertools.chain(*disabled_dirs):
+ yield disabled_test
+
+
+class chunk_by_runtime(InstanceFilter):
+ """
+ Chunking algorithm that attempts to group tests into chunks based on their
+ average runtimes. It keeps manifests of tests together and pairs slow
+ running manifests with fast ones.
+
+ :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks
+ :param total_chunks: the total number of chunks
+ :param runtimes: dictionary of test runtime data, of the form
+ {<test path>: <average runtime>}
+ :param default_runtime: value in seconds to assign tests that don't exist
+ in the runtimes file
+ """
+
+ def __init__(self, this_chunk, total_chunks, runtimes, default_runtime=0):
+ InstanceFilter.__init__(self, this_chunk, total_chunks, runtimes,
+ default_runtime=default_runtime)
+ self.this_chunk = this_chunk
+ self.total_chunks = total_chunks
+
+ # defaultdict(lambda:<int>) assigns all non-existent keys the value of
+ # <int>. This means all tests we encounter that don't exist in the
+ # runtimes file will be assigned `default_runtime`.
+ self.runtimes = defaultdict(lambda: default_runtime)
+ self.runtimes.update(runtimes)
+
+ def __call__(self, tests, values):
+ tests = list(tests)
+ manifests = set(t['manifest'] for t in tests)
+
+ def total_runtime(tests):
+ return sum(self.runtimes[t['relpath']] for t in tests
+ if 'disabled' not in t)
+
+ tests_by_manifest = []
+ for manifest in manifests:
+ mtests = [t for t in tests if t['manifest'] == manifest]
+ tests_by_manifest.append((total_runtime(mtests), mtests))
+ tests_by_manifest.sort(reverse=True)
+
+ tests_by_chunk = [[0, []] for i in range(self.total_chunks)]
+ for runtime, batch in tests_by_manifest:
+ # sort first by runtime, then by number of tests in case of a tie.
+ # This guarantees the chunk with the fastest runtime will always
+ # get the next batch of tests.
+ tests_by_chunk.sort(key=lambda x: (x[0], len(x[1])))
+ tests_by_chunk[0][0] += runtime
+ tests_by_chunk[0][1].extend(batch)
+
+ return (t for t in tests_by_chunk[self.this_chunk - 1][1])
+
+
+class tags(InstanceFilter):
+ """
+ Removes tests that don't contain any of the given tags. This overrides
+ InstanceFilter's __eq__ method, so multiple instances can be added.
+ Multiple tag filters is equivalent to joining tags with the AND operator.
+
+ To define a tag in a manifest, add a `tags` attribute to a test or DEFAULT
+ section. Tests can have multiple tags, in which case they should be
+ whitespace delimited. For example:
+
+ [test_foobar.html]
+ tags = foo bar
+
+ :param tags: A tag or list of tags to filter tests on
+ """
+ unique = False
+
+ def __init__(self, tags):
+ InstanceFilter.__init__(self, tags)
+ if isinstance(tags, basestring):
+ tags = [tags]
+ self.tags = tags
+
+ def __call__(self, tests, values):
+ for test in tests:
+ if 'tags' not in test:
+ continue
+
+ test_tags = [t.strip() for t in test['tags'].split()]
+ if any(t in self.tags for t in test_tags):
+ yield test
+
+
+class pathprefix(InstanceFilter):
+ """
+ Removes tests that don't start with any of the given test paths.
+
+ :param paths: A list of test paths to filter on
+ """
+
+ def __init__(self, paths):
+ InstanceFilter.__init__(self, paths)
+ if isinstance(paths, basestring):
+ paths = [paths]
+ self.paths = paths
+
+ def __call__(self, tests, values):
+ for test in tests:
+ for tp in self.paths:
+ tp = os.path.normpath(tp)
+
+ path = test['relpath']
+ if os.path.isabs(tp):
+ path = test['path']
+
+ if not os.path.normpath(path).startswith(tp):
+ continue
+
+ # any test path that points to a single file will be run no
+ # matter what, even if it's disabled
+ if 'disabled' in test and os.path.normpath(test['relpath']) == tp:
+ del test['disabled']
+ yield test
+ break
+
+
+# filter container
+
+DEFAULT_FILTERS = (
+ skip_if,
+ run_if,
+ fail_if,
+)
+"""
+By default :func:`~.active_tests` will run the :func:`~.skip_if`,
+:func:`~.run_if` and :func:`~.fail_if` filters.
+"""
+
+
+class filterlist(MutableSequence):
+ """
+ A MutableSequence that raises TypeError when adding a non-callable and
+ ValueError if the item is already added.
+ """
+
+ def __init__(self, items=None):
+ self.items = []
+ if items:
+ self.items = list(items)
+
+ def _validate(self, item):
+ if not callable(item):
+ raise TypeError("Filters must be callable!")
+ if item in self:
+ raise ValueError("Filter {} is already applied!".format(item))
+
+ def __getitem__(self, key):
+ return self.items[key]
+
+ def __setitem__(self, key, value):
+ self._validate(value)
+ self.items[key] = value
+
+ def __delitem__(self, key):
+ del self.items[key]
+
+ def __len__(self):
+ return len(self.items)
+
+ def insert(self, index, value):
+ self._validate(value)
+ self.items.insert(index, value)
diff --git a/testing/mozbase/manifestparser/manifestparser/ini.py b/testing/mozbase/manifestparser/manifestparser/ini.py
new file mode 100644
index 000000000..5117dd1ae
--- /dev/null
+++ b/testing/mozbase/manifestparser/manifestparser/ini.py
@@ -0,0 +1,142 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+__all__ = ['read_ini', 'combine_fields']
+
+
+def read_ini(fp, variables=None, default='DEFAULT', defaults_only=False,
+ comments=';#', separators=('=', ':'), strict=True,
+ handle_defaults=True):
+ """
+ read an .ini file and return a list of [(section, values)]
+ - fp : file pointer or path to read
+ - variables : default set of variables
+ - default : name of the section for the default section
+ - defaults_only : if True, return the default section only
+ - comments : characters that if they start a line denote a comment
+ - separators : strings that denote key, value separation in order
+ - strict : whether to be strict about parsing
+ - handle_defaults : whether to incorporate defaults into each section
+ """
+
+ # variables
+ variables = variables or {}
+ sections = []
+ key = value = None
+ section_names = set()
+ if isinstance(fp, basestring):
+ fp = file(fp)
+
+ # read the lines
+ for (linenum, line) in enumerate(fp.read().splitlines(), start=1):
+
+ stripped = line.strip()
+
+ # ignore blank lines
+ if not stripped:
+ # reset key and value to avoid continuation lines
+ key = value = None
+ continue
+
+ # ignore comment lines
+ if stripped[0] in comments:
+ continue
+
+ # check for a new section
+ if len(stripped) > 2 and stripped[0] == '[' and stripped[-1] == ']':
+ section = stripped[1:-1].strip()
+ key = value = None
+
+ # deal with DEFAULT section
+ if section.lower() == default.lower():
+ if strict:
+ assert default not in section_names
+ section_names.add(default)
+ current_section = variables
+ continue
+
+ if strict:
+ # make sure this section doesn't already exist
+ assert section not in section_names, "Section '%s' already found in '%s'" % (
+ section, section_names)
+
+ section_names.add(section)
+ current_section = {}
+ sections.append((section, current_section))
+ continue
+
+ # if there aren't any sections yet, something bad happened
+ if not section_names:
+ raise Exception('No sections found')
+
+ # (key, value) pair
+ for separator in separators:
+ if separator in stripped:
+ key, value = stripped.split(separator, 1)
+ key = key.strip()
+ value = value.strip()
+
+ if strict:
+ # make sure this key isn't already in the section or empty
+ assert key
+ if current_section is not variables:
+ assert key not in current_section
+
+ current_section[key] = value
+ break
+ else:
+ # continuation line ?
+ if line[0].isspace() and key:
+ value = '%s%s%s' % (value, os.linesep, stripped)
+ current_section[key] = value
+ else:
+ # something bad happened!
+ if hasattr(fp, 'name'):
+ filename = fp.name
+ else:
+ filename = 'unknown'
+ raise Exception("Error parsing manifest file '%s', line %s" %
+ (filename, linenum))
+
+ # server-root is a special os path declared relative to the manifest file.
+ # inheritance demands we expand it as absolute
+ if 'server-root' in variables:
+ root = os.path.join(os.path.dirname(fp.name),
+ variables['server-root'])
+ variables['server-root'] = os.path.abspath(root)
+
+ # return the default section only if requested
+ if defaults_only:
+ return [(default, variables)]
+
+ global_vars = variables if handle_defaults else {}
+ sections = [(i, combine_fields(global_vars, j)) for i, j in sections]
+ return sections
+
+
+def combine_fields(global_vars, local_vars):
+ """
+ Combine the given manifest entries according to the semantics of specific fields.
+ This is used to combine manifest level defaults with a per-test definition.
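+
+    For example, ``skip-if`` conditions are OR'ed together::
+
+        combine_fields({'skip-if': 'os == "win"'}, {'skip-if': 'debug'})
+        # -> {'skip-if': '(os == "win") || (debug)'}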
+ """
+ if not global_vars:
+ return local_vars
+ if not local_vars:
+ return global_vars
+ field_patterns = {
+ 'skip-if': '(%s) || (%s)',
+ 'support-files': '%s %s',
+ }
+ final_mapping = global_vars.copy()
+ for field_name, value in local_vars.items():
+ if field_name not in field_patterns or field_name not in global_vars:
+ final_mapping[field_name] = value
+ continue
+ global_value = global_vars[field_name]
+ pattern = field_patterns[field_name]
+ final_mapping[field_name] = pattern % (
+ global_value.split('#')[0], value.split('#')[0])
+ return final_mapping
diff --git a/testing/mozbase/manifestparser/manifestparser/manifestparser.py b/testing/mozbase/manifestparser/manifestparser/manifestparser.py
new file mode 100644
index 000000000..23f14d3f8
--- /dev/null
+++ b/testing/mozbase/manifestparser/manifestparser/manifestparser.py
@@ -0,0 +1,804 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from StringIO import StringIO
+import json
+import fnmatch
+import os
+import shutil
+import sys
+import types
+
+from .ini import read_ini
+from .filters import (
+ DEFAULT_FILTERS,
+ enabled,
+ exists as _exists,
+ filterlist,
+)
+
+__all__ = ['ManifestParser', 'TestManifest', 'convert']
+
+relpath = os.path.relpath
+string = (basestring,)
+
+
+# path normalization
+
+def normalize_path(path):
+ """normalize a relative path"""
+ if sys.platform.startswith('win'):
+ return path.replace('/', os.path.sep)
+ return path
+
+
+def denormalize_path(path):
+ """denormalize a relative path"""
+ if sys.platform.startswith('win'):
+ return path.replace(os.path.sep, '/')
+ return path
+
+
+# objects for parsing manifests
+
+class ManifestParser(object):
+ """read .ini manifests"""
+
+ def __init__(self, manifests=(), defaults=None, strict=True, rootdir=None,
+ finder=None, handle_defaults=True):
+ """Creates a ManifestParser from the given manifest files.
+
+ :param manifests: An iterable of file paths or file objects corresponding
+ to manifests. If a file path refers to a manifest file that
+ does not exist, an IOError is raised.
+ :param defaults: Variables to pre-define in the environment for evaluating
+ expressions in manifests.
+ :param strict: If False, the provided manifests may contain references to
+ listed (test) files that do not exist without raising an
+ IOError during reading, and certain errors in manifests
+ are not considered fatal. Those errors include duplicate
+ section names, redefining variables, and defining empty
+ variables.
+ :param rootdir: The directory used as the basis for conversion to and from
+ relative paths during manifest reading.
+ :param finder: If provided, this finder object will be used for filesystem
+ interactions. Finder objects are part of the mozpack package,
+ documented at
+ http://gecko.readthedocs.org/en/latest/python/mozpack.html#module-mozpack.files
+ :param handle_defaults: If not set, do not propagate manifest defaults to individual
+ test objects. Callers are expected to manage per-manifest
+ defaults themselves via the manifest_defaults member
+ variable in this case.
+ """
+ self._defaults = defaults or {}
+ self._ancestor_defaults = {}
+ self.tests = []
+ self.manifest_defaults = {}
+ self.strict = strict
+ self.rootdir = rootdir
+ self.relativeRoot = None
+ self.finder = finder
+ self._handle_defaults = handle_defaults
+ if manifests:
+ self.read(*manifests)
+
+ def path_exists(self, path):
+ if self.finder:
+ return self.finder.get(path) is not None
+ return os.path.exists(path)
+
+ # methods for reading manifests
+
+ def _read(self, root, filename, defaults, defaults_only=False, parentmanifest=None):
+ """
+ Internal recursive method for reading and parsing manifests.
+ Stores all found tests in self.tests
+ :param root: The base path
+ :param filename: File object or string path for the base manifest file
+ :param defaults: Options that apply to all items
+ :param defaults_only: If True will only gather options, not include
+ tests. Used for upstream parent includes
+ (default False)
+ :param parentmanifest: Filename of the parent manifest (default None)
+ """
+ def read_file(type):
+ include_file = section.split(type, 1)[-1]
+ include_file = normalize_path(include_file)
+ if not os.path.isabs(include_file):
+ include_file = os.path.join(here, include_file)
+ if not self.path_exists(include_file):
+ message = "Included file '%s' does not exist" % include_file
+ if self.strict:
+ raise IOError(message)
+ else:
+ sys.stderr.write("%s\n" % message)
+ return
+ return include_file
+
+ # get directory of this file if not file-like object
+ if isinstance(filename, string):
+ # If we're using mercurial as our filesystem via a finder
+ # during manifest reading, the getcwd() calls that happen
+ # with abspath calls will not be meaningful, so absolute
+ # paths are required.
+ if self.finder:
+ assert os.path.isabs(filename)
+ filename = os.path.abspath(filename)
+ if self.finder:
+ fp = self.finder.get(filename)
+ else:
+ fp = open(filename)
+ here = os.path.dirname(filename)
+ else:
+ fp = filename
+ filename = here = None
+ defaults['here'] = here
+
+ # Rootdir is needed for relative path calculation. Precompute it for
+ # the microoptimization used below.
+ if self.rootdir is None:
+ rootdir = ""
+ else:
+ assert os.path.isabs(self.rootdir)
+ rootdir = self.rootdir + os.path.sep
+
+ # read the configuration
+ sections = read_ini(fp=fp, variables=defaults, strict=self.strict,
+ handle_defaults=self._handle_defaults)
+ self.manifest_defaults[filename] = defaults
+
+ parent_section_found = False
+
+ # get the tests
+ for section, data in sections:
+ # In case of defaults only, no other section than parent: has to
+ # be processed.
+ if defaults_only and not section.startswith('parent:'):
+ continue
+
+ # read the parent manifest if specified
+ if section.startswith('parent:'):
+ parent_section_found = True
+
+ include_file = read_file('parent:')
+ if include_file:
+ self._read(root, include_file, {}, True)
+ continue
+
+ # a file to include
+ # TODO: keep track of included file structure:
+ # self.manifests = {'manifest.ini': 'relative/path.ini'}
+ if section.startswith('include:'):
+ include_file = read_file('include:')
+ if include_file:
+ include_defaults = data.copy()
+ self._read(root, include_file, include_defaults, parentmanifest=filename)
+ continue
+
+ # otherwise an item
+ # apply ancestor defaults, while maintaining current file priority
+ data = dict(self._ancestor_defaults.items() + data.items())
+
+ test = data
+ test['name'] = section
+
+ # Will be None if the manifest being read is a file-like object.
+ test['manifest'] = filename
+
+ # determine the path
+ path = test.get('path', section)
+ _relpath = path
+ if '://' not in path: # don't futz with URLs
+ path = normalize_path(path)
+ if here and not os.path.isabs(path):
+ # Profiling indicates 25% of manifest parsing is spent
+ # in this call to normpath, but almost all calls return
+ # their argument unmodified, so we avoid the call if
+ # '..' is not present in the path.
+ path = os.path.join(here, path)
+ if '..' in path:
+ path = os.path.normpath(path)
+
+ # Microoptimization, because relpath is quite expensive.
+ # We know that rootdir is an absolute path or empty. If path
+ # starts with rootdir, then path is also absolute and the tail
+ # of the path is the relative path (possibly non-normalized,
+ # when here is unknown).
+ # For this to work rootdir needs to be terminated with a path
+ # separator, so that references to sibling directories with
+ # a common prefix don't get miscomputed (e.g. /root and
+ # /rootbeer/file).
+ # When the rootdir is unknown, the relpath needs to be left
+ # unchanged. We use an empty string as rootdir in that case,
+ # which leaves relpath unchanged after slicing.
+ if path.startswith(rootdir):
+ _relpath = path[len(rootdir):]
+ else:
+ _relpath = relpath(path, rootdir)
+
+ test['path'] = path
+ test['relpath'] = _relpath
+
+ if parentmanifest is not None:
+ # If a test was included by a parent manifest we may need to
+ # indicate that in the test object for the sake of identifying
+ # a test, particularly in the case a test file is included by
+ # multiple manifests.
+ test['ancestor-manifest'] = parentmanifest
+
+ # append the item
+ self.tests.append(test)
+
+ # if no parent: section was found for defaults-only, only read the
+ # defaults section of the manifest without interpreting variables
+ if defaults_only and not parent_section_found:
+ sections = read_ini(fp=fp, variables=defaults, defaults_only=True,
+ strict=self.strict)
+ (section, self._ancestor_defaults) = sections[0]
+
+ def read(self, *filenames, **defaults):
+ """
+ read and add manifests from file paths or file-like objects
+
+ filenames -- file paths or file-like objects to read as manifests
+ defaults -- default variables
+ """
+
+ # ensure all files exist
+ missing = [filename for filename in filenames
+ if isinstance(filename, string) and not self.path_exists(filename)]
+ if missing:
+ raise IOError('Missing files: %s' % ', '.join(missing))
+
+ # default variables
+ _defaults = defaults.copy() or self._defaults.copy()
+ _defaults.setdefault('here', None)
+
+ # process each file
+ for filename in filenames:
+ # set the per file defaults
+ defaults = _defaults.copy()
+ here = None
+ if isinstance(filename, string):
+ here = os.path.dirname(os.path.abspath(filename))
+ defaults['here'] = here # directory of master .ini file
+
+ if self.rootdir is None:
+ # set the root directory
+ # == the directory of the first manifest given
+ self.rootdir = here
+
+ self._read(here, filename, defaults)
+
+ # methods for querying manifests
+
+ def query(self, *checks, **kw):
+ """
+ general query function for tests
+ - checks : callable conditions to test if the test fulfills the query
+ """
+ tests = kw.get('tests', None)
+ if tests is None:
+ tests = self.tests
+ retval = []
+ for test in tests:
+ for check in checks:
+ if not check(test):
+ break
+ else:
+ retval.append(test)
+ return retval
+
+ def get(self, _key=None, inverse=False, tags=None, tests=None, **kwargs):
+ # TODO: pass a dict instead of kwargs since you might have
+ # e.g. 'inverse' as a key in the dict
+
+ # TODO: tags should just be part of kwargs with None values
+ # (None == any is kinda weird, but probably still better)
+
+ # fix up tags
+ if tags:
+ tags = set(tags)
+ else:
+ tags = set()
+
+ # make some check functions
+ if inverse:
+ def has_tags(test):
+ return not tags.intersection(test.keys())
+
+ def dict_query(test):
+ for key, value in kwargs.items():
+ if test.get(key) == value:
+ return False
+ return True
+ else:
+ def has_tags(test):
+ return tags.issubset(test.keys())
+
+ def dict_query(test):
+ for key, value in kwargs.items():
+ if test.get(key) != value:
+ return False
+ return True
+
+ # query the tests
+ tests = self.query(has_tags, dict_query, tests=tests)
+
+ # if a key is given, return only a list of that key
+ # useful for keys like 'name' or 'path'
+ if _key:
+ return [test[_key] for test in tests]
+
+ # return the tests
+ return tests
+
+ def manifests(self, tests=None):
+ """
+ return manifests in order in which they appear in the tests
+ """
+ if tests is None:
+ # Make sure to return all the manifests, even ones without tests.
+ return self.manifest_defaults.keys()
+
+ manifests = []
+ for test in tests:
+ manifest = test.get('manifest')
+ if not manifest:
+ continue
+ if manifest not in manifests:
+ manifests.append(manifest)
+ return manifests
+
+ def paths(self):
+ return [i['path'] for i in self.tests]
+
+ # methods for auditing
+
+ def missing(self, tests=None):
+ """
+ return list of tests that do not exist on the filesystem
+ """
+ if tests is None:
+ tests = self.tests
+ existing = list(_exists(tests, {}))
+ return [t for t in tests if t not in existing]
+
+ def check_missing(self, tests=None):
+ missing = self.missing(tests=tests)
+ if missing:
+ missing_paths = [test['path'] for test in missing]
+ if self.strict:
+ raise IOError("Strict mode enabled, test paths must exist. "
+ "The following test(s) are missing: %s" %
+ json.dumps(missing_paths, indent=2))
+ print >> sys.stderr, "Warning: The following test(s) are missing: %s" % \
+ json.dumps(missing_paths, indent=2)
+ return missing
+
+ def verifyDirectory(self, directories, pattern=None, extensions=None):
+ """
+ checks what is on the filesystem vs what is in a manifest
+ returns a 2-tuple of sets:
+ (missing_from_filesystem, missing_from_manifest)
+ """
+
+ files = set([])
+ if isinstance(directories, basestring):
+ directories = [directories]
+
+ # get files in directories
+ for directory in directories:
+ for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
+
+ # only add files that match a pattern
+ if pattern:
+ filenames = fnmatch.filter(filenames, pattern)
+
+ # only add files that have one of the extensions
+ if extensions:
+ filenames = [filename for filename in filenames
+ if os.path.splitext(filename)[-1] in extensions]
+
+ files.update([os.path.join(dirpath, filename) for filename in filenames])
+
+ paths = set(self.paths())
+ missing_from_filesystem = paths.difference(files)
+ missing_from_manifest = files.difference(paths)
+ return (missing_from_filesystem, missing_from_manifest)
+
+ # methods for output
+
+ def write(self, fp=sys.stdout, rootdir=None,
+ global_tags=None, global_kwargs=None,
+ local_tags=None, local_kwargs=None):
+ """
+ write a manifest given a query
+ global and local options will be munged to do the query
+ globals will be written to the top of the file
+ locals (if given) will be written per test
+ """
+
+ # open file if `fp` given as string
+ close = False
+ if isinstance(fp, string):
+ fp = file(fp, 'w')
+ close = True
+
+ # root directory
+ if rootdir is None:
+ rootdir = self.rootdir
+
+ # sanitize input
+ global_tags = global_tags or set()
+ local_tags = local_tags or set()
+ global_kwargs = global_kwargs or {}
+ local_kwargs = local_kwargs or {}
+
+ # create the query
+ tags = set([])
+ tags.update(global_tags)
+ tags.update(local_tags)
+ kwargs = {}
+ kwargs.update(global_kwargs)
+ kwargs.update(local_kwargs)
+
+ # get matching tests
+ tests = self.get(tags=tags, **kwargs)
+
+ # print the .ini manifest
+ if global_tags or global_kwargs:
+ print >> fp, '[DEFAULT]'
+ for tag in global_tags:
+ print >> fp, '%s =' % tag
+ for key, value in global_kwargs.items():
+ print >> fp, '%s = %s' % (key, value)
+ print >> fp
+
+ for test in tests:
+ test = test.copy() # don't overwrite
+
+ path = test['name']
+ if not os.path.isabs(path):
+ path = test['path']
+ if self.rootdir:
+ path = relpath(test['path'], self.rootdir)
+ path = denormalize_path(path)
+ print >> fp, '[%s]' % path
+
+ # reserved keywords:
+ reserved = ['path', 'name', 'here', 'manifest', 'relpath', 'ancestor-manifest']
+ for key in sorted(test.keys()):
+ if key in reserved:
+ continue
+ if key in global_kwargs:
+ continue
+ if key in global_tags and not test[key]:
+ continue
+ print >> fp, '%s = %s' % (key, test[key])
+ print >> fp
+
+ if close:
+ # close the created file
+ fp.close()
+
+ def __str__(self):
+ fp = StringIO()
+ self.write(fp=fp)
+ value = fp.getvalue()
+ return value
+
+ def copy(self, directory, rootdir=None, *tags, **kwargs):
+ """
+ copy the manifests and associated tests
+ - directory : directory to copy to
+ - rootdir : root directory to copy to (if not given from manifests)
+ - tags : keywords the tests must have
+ - kwargs : key, values the tests must match
+ """
+ # XXX note that copy does *not* filter the tests out of the
+ # resulting manifest; it just stupidly copies them over.
+ # ideally, it would reread the manifests and filter out the
+ # tests that don't match *tags and **kwargs
+
+ # destination
+ if not os.path.exists(directory):
+            os.makedirs(directory)
+ else:
+ # sanity check
+ assert os.path.isdir(directory)
+
+ # tests to copy
+ tests = self.get(tags=tags, **kwargs)
+ if not tests:
+ return # nothing to do!
+
+ # root directory
+ if rootdir is None:
+ rootdir = self.rootdir
+
+ # copy the manifests + tests
+ manifests = [relpath(manifest, rootdir) for manifest in self.manifests()]
+ for manifest in manifests:
+ destination = os.path.join(directory, manifest)
+ dirname = os.path.dirname(destination)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ else:
+ # sanity check
+ assert os.path.isdir(dirname)
+ shutil.copy(os.path.join(rootdir, manifest), destination)
+
+ missing = self.check_missing(tests)
+ tests = [test for test in tests if test not in missing]
+ for test in tests:
+ if os.path.isabs(test['name']):
+ continue
+ source = test['path']
+ destination = os.path.join(directory, relpath(test['path'], rootdir))
+ shutil.copy(source, destination)
+ # TODO: ensure that all of the tests are below the from_dir
+
+ def update(self, from_dir, rootdir=None, *tags, **kwargs):
+ """
+ update the tests as listed in a manifest from a directory
+ - from_dir : directory where the tests live
+ - rootdir : root directory to copy to (if not given from manifests)
+ - tags : keys the tests must have
+ - kwargs : key, values the tests must match
+ """
+
+ # get the tests
+ tests = self.get(tags=tags, **kwargs)
+
+ # get the root directory
+ if not rootdir:
+ rootdir = self.rootdir
+
+ # copy them!
+ for test in tests:
+ if not os.path.isabs(test['name']):
+ _relpath = relpath(test['path'], rootdir)
+ source = os.path.join(from_dir, _relpath)
+ if not os.path.exists(source):
+ message = "Missing test: '%s' does not exist!"
+ if self.strict:
+ raise IOError(message)
+ print >> sys.stderr, message + " Skipping."
+ continue
+ destination = os.path.join(rootdir, _relpath)
+ shutil.copy(source, destination)
+
+ # directory importers
+
+ @classmethod
+ def _walk_directories(cls, directories, callback, pattern=None, ignore=()):
+ """
+ internal function to import directories
+ """
+
+ if isinstance(pattern, basestring):
+ patterns = [pattern]
+ else:
+ patterns = pattern
+ ignore = set(ignore)
+
+ if not patterns:
+ def accept_filename(filename):
+ return True
+ else:
+ def accept_filename(filename):
+ for pattern in patterns:
+ if fnmatch.fnmatch(filename, pattern):
+ return True
+
+ if not ignore:
+ def accept_dirname(dirname):
+ return True
+ else:
+ def accept_dirname(dirname):
+ return dirname not in ignore
+
+ rootdirectories = directories[:]
+ seen_directories = set()
+ for rootdirectory in rootdirectories:
+            # recurse into directories iteratively, using a list as a stack
+ directories = [os.path.realpath(rootdirectory)]
+ while directories:
+ directory = directories.pop(0)
+ if directory in seen_directories:
+ # eliminate possible infinite recursion due to
+ # symbolic links
+ continue
+ seen_directories.add(directory)
+
+ files = []
+ subdirs = []
+ for name in sorted(os.listdir(directory)):
+ path = os.path.join(directory, name)
+ if os.path.isfile(path):
+                        # os.path.isfile follows symbolic links, so we don't
+                        # need to handle them here.
+ if accept_filename(name):
+ files.append(name)
+ continue
+ elif os.path.islink(path):
+ # eliminate symbolic links
+ path = os.path.realpath(path)
+
+ # we must have a directory here
+ if accept_dirname(name):
+ subdirs.append(name)
+ # this subdir is added for recursion
+ directories.insert(0, path)
+
+                # all subdirs and files have now been filtered; call the
+                # callback function if the directory is not empty
+ if subdirs or files:
+ callback(rootdirectory, directory, subdirs, files)
+
+ @classmethod
+ def populate_directory_manifests(cls, directories, filename, pattern=None, ignore=(),
+ overwrite=False):
+ """
+ walks directories and writes manifests of name `filename` in-place;
+ returns `cls` instance populated with the given manifests
+
+ filename -- filename of manifests to write
+ pattern -- shell pattern (glob) or patterns of filenames to match
+ ignore -- directory names to ignore
+ overwrite -- whether to overwrite existing files of given name
+ """
+
+ manifest_dict = {}
+
+ if os.path.basename(filename) != filename:
+ raise IOError("filename should not include directory name")
+
+ # no need to hit directories more than once
+ _directories = directories
+ directories = []
+ for directory in _directories:
+ if directory not in directories:
+ directories.append(directory)
+
+ def callback(directory, dirpath, dirnames, filenames):
+ """write a manifest for each directory"""
+
+ manifest_path = os.path.join(dirpath, filename)
+            if (dirnames or filenames) and (overwrite or not os.path.exists(manifest_path)):
+ with file(manifest_path, 'w') as manifest:
+ for dirname in dirnames:
+ print >> manifest, '[include:%s]' % os.path.join(dirname, filename)
+ for _filename in filenames:
+ print >> manifest, '[%s]' % _filename
+
+ # add to list of manifests
+ manifest_dict.setdefault(directory, manifest_path)
+
+ # walk the directories to gather files
+ cls._walk_directories(directories, callback, pattern=pattern, ignore=ignore)
+ # get manifests
+ manifests = [manifest_dict[directory] for directory in _directories]
+
+ # create a `cls` instance with the manifests
+ return cls(manifests=manifests)
+
+ @classmethod
+ def from_directories(cls, directories, pattern=None, ignore=(), write=None, relative_to=None):
+ """
+ convert directories to a simple manifest; returns ManifestParser instance
+
+ pattern -- shell pattern (glob) or patterns of filenames to match
+ ignore -- directory names to ignore
+ write -- filename or file-like object of manifests to write;
+ if `None` then a StringIO instance will be created
+ relative_to -- write paths relative to this path;
+ if false then the paths are absolute
+ """
+
+ # determine output
+ opened_manifest_file = None # name of opened manifest file
+ absolute = not relative_to # whether to output absolute path names as names
+ if isinstance(write, string):
+ opened_manifest_file = write
+ write = file(write, 'w')
+ if write is None:
+ write = StringIO()
+
+ # walk the directories, generating manifests
+ def callback(directory, dirpath, dirnames, filenames):
+
+ # absolute paths
+ filenames = [os.path.join(dirpath, filename)
+ for filename in filenames]
+ # ensure new manifest isn't added
+ filenames = [filename for filename in filenames
+ if filename != opened_manifest_file]
+ # normalize paths
+ if not absolute and relative_to:
+ filenames = [relpath(filename, relative_to)
+ for filename in filenames]
+
+ # write to manifest
+ print >> write, '\n'.join(['[%s]' % denormalize_path(filename)
+ for filename in filenames])
+
+ cls._walk_directories(directories, callback, pattern=pattern, ignore=ignore)
+
+ if opened_manifest_file:
+ # close file
+ write.close()
+ manifests = [opened_manifest_file]
+ else:
+ # manifests/write is a file-like object;
+ # rewind buffer
+ write.flush()
+ write.seek(0)
+ manifests = [write]
+
+ # make a ManifestParser instance
+ return cls(manifests=manifests)
+
+convert = ManifestParser.from_directories
+
+
+class TestManifest(ManifestParser):
+ """
+ apply logic to manifests; this is your integration layer :)
+ specific harnesses may subclass from this if they need more logic
+ """
+
+ def __init__(self, *args, **kwargs):
+ ManifestParser.__init__(self, *args, **kwargs)
+ self.filters = filterlist(DEFAULT_FILTERS)
+ self.last_used_filters = []
+
+ def active_tests(self, exists=True, disabled=True, filters=None, **values):
+ """
+ Run all applied filters on the set of tests.
+
+ :param exists: filter out non-existing tests (default True)
+ :param disabled: whether to return disabled tests (default True)
+ :param values: keys and values to filter on (e.g. `os = linux mac`)
+ :param filters: list of filters to apply to the tests
+ :returns: list of test objects that were not filtered out
+ """
+ tests = [i.copy() for i in self.tests] # shallow copy
+
+ # mark all tests as passing
+ for test in tests:
+ test['expected'] = test.get('expected', 'pass')
+
+ # make a copy so original doesn't get modified
+ fltrs = self.filters[:]
+ if exists:
+ if self.strict:
+ self.check_missing(tests)
+ else:
+ fltrs.append(_exists)
+
+ if not disabled:
+ fltrs.append(enabled)
+
+ if filters:
+ fltrs += filters
+
+ self.last_used_filters = fltrs[:]
+ for fn in fltrs:
+ tests = fn(tests, values)
+ return list(tests)
+
+ def test_paths(self):
+ return [test['path'] for test in self.active_tests()]
+
+ def fmt_filters(self, filters=None):
+ filters = filters or self.last_used_filters
+ names = []
+ for f in filters:
+ if isinstance(f, types.FunctionType):
+ names.append(f.__name__)
+ else:
+ names.append(str(f))
+ return ', '.join(names)
diff --git a/testing/mozbase/manifestparser/setup.py b/testing/mozbase/manifestparser/setup.py
new file mode 100644
index 000000000..b34f9cea7
--- /dev/null
+++ b/testing/mozbase/manifestparser/setup.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_NAME = "manifestparser"
+PACKAGE_VERSION = '1.1'
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Library to create and manage test manifests",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla manifests',
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ zip_safe=False,
+ packages=['manifestparser'],
+ install_requires=[],
+ entry_points="""
+ [console_scripts]
+ manifestparser = manifestparser.cli:main
+ """,
+ )
diff --git a/testing/mozbase/manifestparser/tests/comment-example.ini b/testing/mozbase/manifestparser/tests/comment-example.ini
new file mode 100644
index 000000000..030ceffdb
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/comment-example.ini
@@ -0,0 +1,11 @@
+; See https://bugzilla.mozilla.org/show_bug.cgi?id=813674
+
+[test_0180_fileInUse_xp_win_complete.js]
+[test_0181_fileInUse_xp_win_partial.js]
+[test_0182_rmrfdirFileInUse_xp_win_complete.js]
+[test_0183_rmrfdirFileInUse_xp_win_partial.js]
+[test_0184_fileInUse_xp_win_complete.js]
+[test_0185_fileInUse_xp_win_partial.js]
+[test_0186_rmrfdirFileInUse_xp_win_complete.js]
+[test_0187_rmrfdirFileInUse_xp_win_partial.js]
+; [test_0202_app_launch_apply_update_dirlocked.js] # Test disabled, bug 757632
\ No newline at end of file
diff --git a/testing/mozbase/manifestparser/tests/default-skipif.ini b/testing/mozbase/manifestparser/tests/default-skipif.ini
new file mode 100644
index 000000000..d3c268733
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/default-skipif.ini
@@ -0,0 +1,22 @@
+[DEFAULT]
+skip-if = os == 'win' && debug # a pesky comment
+
+
+[test1]
+skip-if = debug
+
+[test2]
+skip-if = os == 'linux'
+
+[test3]
+skip-if = os == 'win'
+
+[test4]
+skip-if = os == 'win' && debug
+
+[test5]
+foo = bar
+
+[test6]
+skip-if = debug # a second pesky comment
+
diff --git a/testing/mozbase/manifestparser/tests/default-suppfiles.ini b/testing/mozbase/manifestparser/tests/default-suppfiles.ini
new file mode 100644
index 000000000..12af247b8
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/default-suppfiles.ini
@@ -0,0 +1,9 @@
+[DEFAULT]
+support-files = foo.js # a comment
+
+[test7]
+[test8]
+support-files = bar.js # another comment
+[test9]
+foo = bar
+
diff --git a/testing/mozbase/manifestparser/tests/filter-example.ini b/testing/mozbase/manifestparser/tests/filter-example.ini
new file mode 100644
index 000000000..13a8734c3
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/filter-example.ini
@@ -0,0 +1,11 @@
+# illustrate test filters based on various categories
+
+[windowstest]
+skip-if = os != 'win'
+
+[fleem]
+skip-if = os == 'mac'
+
+[linuxtest]
+skip-if = (os == 'mac') || (os == 'win')
+fail-if = toolkit == 'cocoa'
diff --git a/testing/mozbase/manifestparser/tests/fleem b/testing/mozbase/manifestparser/tests/fleem
new file mode 100644
index 000000000..744817b82
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/fleem
@@ -0,0 +1 @@
+# dummy spot for "fleem" test
diff --git a/testing/mozbase/manifestparser/tests/include-example.ini b/testing/mozbase/manifestparser/tests/include-example.ini
new file mode 100644
index 000000000..69e728c3b
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/include-example.ini
@@ -0,0 +1,11 @@
+[DEFAULT]
+foo = bar
+
+[include:include/bar.ini]
+
+[fleem]
+
+[include:include/foo.ini]
+red = roses
+blue = violets
+yellow = daffodils
\ No newline at end of file
diff --git a/testing/mozbase/manifestparser/tests/include-invalid.ini b/testing/mozbase/manifestparser/tests/include-invalid.ini
new file mode 100644
index 000000000..e3ed0dd6b
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/include-invalid.ini
@@ -0,0 +1 @@
+[include:invalid.ini]
diff --git a/testing/mozbase/manifestparser/tests/include/bar.ini b/testing/mozbase/manifestparser/tests/include/bar.ini
new file mode 100644
index 000000000..bcb312d1d
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/include/bar.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+foo = fleem
+
+[crash-handling]
\ No newline at end of file
diff --git a/testing/mozbase/manifestparser/tests/include/crash-handling b/testing/mozbase/manifestparser/tests/include/crash-handling
new file mode 100644
index 000000000..8e19a6375
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/include/crash-handling
@@ -0,0 +1 @@
+# dummy spot for "crash-handling" test
diff --git a/testing/mozbase/manifestparser/tests/include/flowers b/testing/mozbase/manifestparser/tests/include/flowers
new file mode 100644
index 000000000..a25acfbe2
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/include/flowers
@@ -0,0 +1 @@
+# dummy spot for "flowers" test
diff --git a/testing/mozbase/manifestparser/tests/include/foo.ini b/testing/mozbase/manifestparser/tests/include/foo.ini
new file mode 100644
index 000000000..cfc90ace8
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/include/foo.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+blue = ocean
+
+[flowers]
+yellow = submarine
\ No newline at end of file
diff --git a/testing/mozbase/manifestparser/tests/just-defaults.ini b/testing/mozbase/manifestparser/tests/just-defaults.ini
new file mode 100644
index 000000000..83a0cec0c
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/just-defaults.ini
@@ -0,0 +1,2 @@
+[DEFAULT]
+foo = bar
diff --git a/testing/mozbase/manifestparser/tests/manifest.ini b/testing/mozbase/manifestparser/tests/manifest.ini
new file mode 100644
index 000000000..dfa185649
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/manifest.ini
@@ -0,0 +1,11 @@
+# test manifest for manifestparser
+[test_expressionparser.py]
+[test_manifestparser.py]
+[test_testmanifest.py]
+[test_read_ini.py]
+[test_convert_directory.py]
+[test_filters.py]
+[test_chunking.py]
+
+[test_convert_symlinks.py]
+disabled = https://bugzilla.mozilla.org/show_bug.cgi?id=920938
diff --git a/testing/mozbase/manifestparser/tests/missing-path.ini b/testing/mozbase/manifestparser/tests/missing-path.ini
new file mode 100644
index 000000000..919d8e04d
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/missing-path.ini
@@ -0,0 +1,2 @@
+[foo]
+[bar]
diff --git a/testing/mozbase/manifestparser/tests/mozmill-example.ini b/testing/mozbase/manifestparser/tests/mozmill-example.ini
new file mode 100644
index 000000000..114cf48c4
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/mozmill-example.ini
@@ -0,0 +1,80 @@
+[testAddons/testDisableEnablePlugin.js]
+[testAddons/testGetAddons.js]
+[testAddons/testSearchAddons.js]
+[testAwesomeBar/testAccessLocationBar.js]
+[testAwesomeBar/testCheckItemHighlight.js]
+[testAwesomeBar/testEscapeAutocomplete.js]
+[testAwesomeBar/testFaviconInAutocomplete.js]
+[testAwesomeBar/testGoButton.js]
+[testAwesomeBar/testLocationBarSearches.js]
+[testAwesomeBar/testPasteLocationBar.js]
+[testAwesomeBar/testSuggestHistoryBookmarks.js]
+[testAwesomeBar/testVisibleItemsMax.js]
+[testBookmarks/testAddBookmarkToMenu.js]
+[testCookies/testDisableCookies.js]
+[testCookies/testEnableCookies.js]
+[testCookies/testRemoveAllCookies.js]
+[testCookies/testRemoveCookie.js]
+[testDownloading/testCloseDownloadManager.js]
+[testDownloading/testDownloadStates.js]
+[testDownloading/testOpenDownloadManager.js]
+[testFindInPage/testFindInPage.js]
+[testFormManager/testAutoCompleteOff.js]
+[testFormManager/testBasicFormCompletion.js]
+[testFormManager/testClearFormHistory.js]
+[testFormManager/testDisableFormManager.js]
+[testGeneral/testGoogleSuggestions.js]
+[testGeneral/testStopReloadButtons.js]
+[testInstallation/testBreakpadInstalled.js]
+[testLayout/testNavigateFTP.js]
+[testPasswordManager/testPasswordNotSaved.js]
+[testPasswordManager/testPasswordSavedAndDeleted.js]
+[testPopups/testPopupsAllowed.js]
+[testPopups/testPopupsBlocked.js]
+[testPreferences/testPaneRetention.js]
+[testPreferences/testPreferredLanguage.js]
+[testPreferences/testRestoreHomepageToDefault.js]
+[testPreferences/testSetToCurrentPage.js]
+[testPreferences/testSwitchPanes.js]
+[testPrivateBrowsing/testAboutPrivateBrowsing.js]
+[testPrivateBrowsing/testCloseWindow.js]
+[testPrivateBrowsing/testDisabledElements.js]
+[testPrivateBrowsing/testDisabledPermissions.js]
+[testPrivateBrowsing/testDownloadManagerClosed.js]
+[testPrivateBrowsing/testGeolocation.js]
+[testPrivateBrowsing/testStartStopPBMode.js]
+[testPrivateBrowsing/testTabRestoration.js]
+[testPrivateBrowsing/testTabsDismissedOnStop.js]
+[testSearch/testAddMozSearchProvider.js]
+[testSearch/testFocusAndSearch.js]
+[testSearch/testGetMoreSearchEngines.js]
+[testSearch/testOpenSearchAutodiscovery.js]
+[testSearch/testRemoveSearchEngine.js]
+[testSearch/testReorderSearchEngines.js]
+[testSearch/testRestoreDefaults.js]
+[testSearch/testSearchSelection.js]
+[testSearch/testSearchSuggestions.js]
+[testSecurity/testBlueLarry.js]
+[testSecurity/testDefaultPhishingEnabled.js]
+[testSecurity/testDefaultSecurityPrefs.js]
+[testSecurity/testEncryptedPageWarning.js]
+[testSecurity/testGreenLarry.js]
+[testSecurity/testGreyLarry.js]
+[testSecurity/testIdentityPopupOpenClose.js]
+[testSecurity/testSSLDisabledErrorPage.js]
+[testSecurity/testSafeBrowsingNotificationBar.js]
+[testSecurity/testSafeBrowsingWarningPages.js]
+[testSecurity/testSecurityInfoViaMoreInformation.js]
+[testSecurity/testSecurityNotification.js]
+[testSecurity/testSubmitUnencryptedInfoWarning.js]
+[testSecurity/testUnknownIssuer.js]
+[testSecurity/testUntrustedConnectionErrorPage.js]
+[testSessionStore/testUndoTabFromContextMenu.js]
+[testTabbedBrowsing/testBackgroundTabScrolling.js]
+[testTabbedBrowsing/testCloseTab.js]
+[testTabbedBrowsing/testNewTab.js]
+[testTabbedBrowsing/testNewWindow.js]
+[testTabbedBrowsing/testOpenInBackground.js]
+[testTabbedBrowsing/testOpenInForeground.js]
+[testTechnicalTools/testAccessPageInfoDialog.js]
+[testToolbar/testBackForwardButtons.js]
diff --git a/testing/mozbase/manifestparser/tests/mozmill-restart-example.ini b/testing/mozbase/manifestparser/tests/mozmill-restart-example.ini
new file mode 100644
index 000000000..e27ae9b93
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/mozmill-restart-example.ini
@@ -0,0 +1,26 @@
+[DEFAULT]
+type = restart
+
+[restartTests/testExtensionInstallUninstall/test2.js]
+foo = bar
+
+[restartTests/testExtensionInstallUninstall/test1.js]
+foo = baz
+
+[restartTests/testExtensionInstallUninstall/test3.js]
+[restartTests/testSoftwareUpdateAutoProxy/test2.js]
+[restartTests/testSoftwareUpdateAutoProxy/test1.js]
+[restartTests/testMasterPassword/test1.js]
+[restartTests/testExtensionInstallGetAddons/test2.js]
+[restartTests/testExtensionInstallGetAddons/test1.js]
+[restartTests/testMultipleExtensionInstallation/test2.js]
+[restartTests/testMultipleExtensionInstallation/test1.js]
+[restartTests/testThemeInstallUninstall/test2.js]
+[restartTests/testThemeInstallUninstall/test1.js]
+[restartTests/testThemeInstallUninstall/test3.js]
+[restartTests/testDefaultBookmarks/test1.js]
+[softwareUpdate/testFallbackUpdate/test2.js]
+[softwareUpdate/testFallbackUpdate/test1.js]
+[softwareUpdate/testFallbackUpdate/test3.js]
+[softwareUpdate/testDirectUpdate/test2.js]
+[softwareUpdate/testDirectUpdate/test1.js]
diff --git a/testing/mozbase/manifestparser/tests/no-tests.ini b/testing/mozbase/manifestparser/tests/no-tests.ini
new file mode 100644
index 000000000..83a0cec0c
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/no-tests.ini
@@ -0,0 +1,2 @@
+[DEFAULT]
+foo = bar
diff --git a/testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini b/testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini
new file mode 100644
index 000000000..828525c18
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini
@@ -0,0 +1,3 @@
+[parent:../manifest.ini]
+
+[testFirst.js]
diff --git a/testing/mozbase/manifestparser/tests/parent/include/manifest.ini b/testing/mozbase/manifestparser/tests/parent/include/manifest.ini
new file mode 100644
index 000000000..fb9756d6a
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/include/manifest.ini
@@ -0,0 +1,8 @@
+[DEFAULT]
+top = data
+
+[include:first/manifest.ini]
+disabled = YES
+
+[include:second/manifest.ini]
+disabled = NO
diff --git a/testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini b/testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini
new file mode 100644
index 000000000..31f053756
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini
@@ -0,0 +1,3 @@
+[parent:../manifest.ini]
+
+[testSecond.js]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini
new file mode 100644
index 000000000..ac7c370c3
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+x = level_1
+
+[test_1]
+[test_2]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_1_server-root.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_1_server-root.ini
new file mode 100644
index 000000000..486a9596e
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_1_server-root.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+server-root = ../root
+other-root = ../root
+
+[test_1]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini
new file mode 100644
index 000000000..ada6a510d
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini
@@ -0,0 +1,3 @@
+[parent:../level_1.ini]
+
+[test_2]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2_server-root.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2_server-root.ini
new file mode 100644
index 000000000..218789784
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2_server-root.ini
@@ -0,0 +1,3 @@
+[parent:../level_1_server-root.ini]
+
+[test_2]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini
new file mode 100644
index 000000000..2edd647fc
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini
@@ -0,0 +1,3 @@
+[parent:../level_2.ini]
+
+[test_3]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini
new file mode 100644
index 000000000..d6aae60ae
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini
@@ -0,0 +1,6 @@
+[parent:../level_2.ini]
+
+[DEFAULT]
+x = level_3
+
+[test_3]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_server-root.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_server-root.ini
new file mode 100644
index 000000000..0427087b4
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_server-root.ini
@@ -0,0 +1,3 @@
+[parent:../level_2_server-root.ini]
+
+[test_3]
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3 b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3
new file mode 100644
index 000000000..f5de58752
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3
@@ -0,0 +1 @@
+# dummy spot for "test_3" test
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2 b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2
new file mode 100644
index 000000000..5b77e04f3
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2
@@ -0,0 +1 @@
+# dummy spot for "test_2" test
diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/test_1 b/testing/mozbase/manifestparser/tests/parent/level_1/test_1
new file mode 100644
index 000000000..dccbf04e4
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/level_1/test_1
@@ -0,0 +1 @@
+# dummy spot for "test_1" test
diff --git a/testing/mozbase/manifestparser/tests/parent/root/dummy b/testing/mozbase/manifestparser/tests/parent/root/dummy
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/parent/root/dummy
diff --git a/testing/mozbase/manifestparser/tests/path-example.ini b/testing/mozbase/manifestparser/tests/path-example.ini
new file mode 100644
index 000000000..366782d95
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/path-example.ini
@@ -0,0 +1,2 @@
+[foo]
+path = fleem
\ No newline at end of file
diff --git a/testing/mozbase/manifestparser/tests/relative-path.ini b/testing/mozbase/manifestparser/tests/relative-path.ini
new file mode 100644
index 000000000..57105489b
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/relative-path.ini
@@ -0,0 +1,5 @@
+[foo]
+path = ../fleem
+
+[bar]
+path = ../testsSIBLING/example
diff --git a/testing/mozbase/manifestparser/tests/subsuite.ini b/testing/mozbase/manifestparser/tests/subsuite.ini
new file mode 100644
index 000000000..c1a70bd44
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/subsuite.ini
@@ -0,0 +1,13 @@
+[test1]
+subsuite=bar,foo=="bar" # this has a comment
+
+[test2]
+subsuite=bar,foo=="bar"
+
+[test3]
+subsuite=baz
+
+[test4]
+[test5]
+[test6]
+subsuite=bar,foo=="szy" || foo=="bar" \ No newline at end of file
diff --git a/testing/mozbase/manifestparser/tests/test_chunking.py b/testing/mozbase/manifestparser/tests/test_chunking.py
new file mode 100644
index 000000000..719bbca80
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_chunking.py
@@ -0,0 +1,302 @@
+#!/usr/bin/env python
+
+from itertools import chain
+from unittest import TestCase
+import os
+import random
+
+from manifestparser.filters import (
+ chunk_by_dir,
+ chunk_by_runtime,
+ chunk_by_slice,
+)
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ChunkBySlice(TestCase):
+ """Test chunking related filters"""
+
+ def generate_tests(self, num, disabled=None):
+ disabled = disabled or []
+ tests = []
+ for i in range(num):
+ test = {'name': 'test%i' % i}
+ if i in disabled:
+ test['disabled'] = ''
+ tests.append(test)
+ return tests
+
+ def run_all_combos(self, num_tests, disabled=None):
+ tests = self.generate_tests(num_tests, disabled=disabled)
+
+ for total in range(1, num_tests + 1):
+ res = []
+ res_disabled = []
+ for chunk in range(1, total + 1):
+ f = chunk_by_slice(chunk, total)
+ res.append(list(f(tests, {})))
+ if disabled:
+ f.disabled = True
+ res_disabled.append(list(f(tests, {})))
+
+ lengths = [len([t for t in c if 'disabled' not in t]) for c in res]
+ # the chunk with the most tests should have at most one more test
+ # than the chunk with the least tests
+ self.assertLessEqual(max(lengths) - min(lengths), 1)
+
+ # chaining all chunks back together should equal the original list
+ # of tests
+ self.assertEqual(list(chain.from_iterable(res)), list(tests))
+
+ if disabled:
+ lengths = [len(c) for c in res_disabled]
+ self.assertLessEqual(max(lengths) - min(lengths), 1)
+ self.assertEqual(list(chain.from_iterable(res_disabled)),
+ list(tests))
+
+ def test_chunk_by_slice(self):
+ chunk = chunk_by_slice(1, 1)
+ self.assertEqual(list(chunk([], {})), [])
+
+ self.run_all_combos(num_tests=1)
+ self.run_all_combos(num_tests=10, disabled=[1, 2])
+
+ num_tests = 67
+ disabled = list(i for i in xrange(num_tests) if i % 4 == 0)
+ self.run_all_combos(num_tests=num_tests, disabled=disabled)
+
+ def test_two_times_more_chunks_than_tests(self):
+ # test case for bug 1182817
+ tests = self.generate_tests(5)
+
+ total_chunks = 10
+ for i in range(1, total_chunks + 1):
+ # ensure IndexError is not raised
+ chunk_by_slice(i, total_chunks)(tests, {})
+
+
+class ChunkByDir(TestCase):
+ """Test chunking related filters"""
+
+ def generate_tests(self, dirs):
+ """
+ :param dirs: dict of the form,
+ { <dir>: <num tests> }
+ """
+ i = 0
+ for d, num in dirs.iteritems():
+ for j in range(num):
+ i += 1
+ name = 'test%i' % i
+ test = {'name': name,
+ 'relpath': os.path.join(d, name)}
+ yield test
+
+ def run_all_combos(self, dirs):
+ tests = list(self.generate_tests(dirs))
+
+ deepest = max(len(t['relpath'].split(os.sep)) - 1 for t in tests)
+ for depth in range(1, deepest + 1):
+
+ def num_groups(tests):
+ unique = set()
+ for p in [t['relpath'] for t in tests]:
+ p = p.split(os.sep)
+ p = p[:min(depth, len(p) - 1)]
+ unique.add(os.sep.join(p))
+ return len(unique)
+
+ for total in range(1, num_groups(tests) + 1):
+ res = []
+ for this in range(1, total + 1):
+ f = chunk_by_dir(this, total, depth)
+ res.append(list(f(tests, {})))
+
+ lengths = map(num_groups, res)
+ # the chunk with the most dirs should have at most one more
+ # dir than the chunk with the least dirs
+ self.assertLessEqual(max(lengths) - min(lengths), 1)
+
+ all_chunks = list(chain.from_iterable(res))
+ # chunk_by_dir will mess up order, but chained chunks should
+ # contain all of the original tests and be the same length
+ self.assertEqual(len(all_chunks), len(tests))
+ for t in tests:
+ self.assertIn(t, all_chunks)
+
+ def test_chunk_by_dir(self):
+ chunk = chunk_by_dir(1, 1, 1)
+ self.assertEqual(list(chunk([], {})), [])
+
+ dirs = {
+ 'a': 2,
+ }
+ self.run_all_combos(dirs)
+
+ dirs = {
+ '': 1,
+ 'foo': 1,
+ 'bar': 0,
+ '/foobar': 1,
+ }
+ self.run_all_combos(dirs)
+
+ dirs = {
+ 'a': 1,
+ 'b': 1,
+ 'a/b': 2,
+ 'a/c': 1,
+ }
+ self.run_all_combos(dirs)
+
+ dirs = {
+ 'a': 5,
+ 'a/b': 4,
+ 'a/b/c': 7,
+ 'a/b/c/d': 1,
+ 'a/b/c/e': 3,
+ 'b/c': 2,
+ 'b/d': 5,
+ 'b/d/e': 6,
+ 'c': 8,
+ 'c/d/e/f/g/h/i/j/k/l': 5,
+ 'c/d/e/f/g/i/j/k/l/m/n': 2,
+ 'c/e': 1,
+ }
+ self.run_all_combos(dirs)
+
+
+class ChunkByRuntime(TestCase):
+ """Test chunking related filters"""
+
+ def generate_tests(self, dirs):
+ """
+ :param dirs: dict of the form,
+ { <dir>: <num tests> }
+ """
+ i = 0
+ for d, num in dirs.iteritems():
+ for j in range(num):
+ i += 1
+ name = 'test%i' % i
+ test = {'name': name,
+ 'relpath': os.path.join(d, name),
+ 'manifest': os.path.join(d, 'manifest.ini')}
+ yield test
+
+ def get_runtimes(self, tests):
+ runtimes = {}
+ for test in tests:
+ runtimes[test['relpath']] = random.randint(0, 100)
+ return runtimes
+
+    def chunk_by_round_robin(self, tests, total, runtimes):
+        manifests = set(t['manifest'] for t in tests)
+        tests_by_manifest = []
+        for manifest in manifests:
+            mtests = [t for t in tests if t['manifest'] == manifest]
+            total_runtime = sum(runtimes[t['relpath']] for t in mtests
+                                if 'disabled' not in t)
+            tests_by_manifest.append((total_runtime, mtests))
+        tests_by_manifest.sort()
+
+        chunks = [[] for i in range(total)]
+ d = 1 # direction
+ i = 0
+ for runtime, batch in tests_by_manifest:
+ chunks[i].extend(batch)
+
+ # "draft" style (last pick goes first in the next round)
+ if (i == 0 and d == -1) or (i == total - 1 and d == 1):
+ d = -d
+ else:
+ i += d
+
+ # make sure this test algorithm is valid
+ all_chunks = list(chain.from_iterable(chunks))
+ self.assertEqual(len(all_chunks), len(tests))
+ for t in tests:
+ self.assertIn(t, all_chunks)
+
+ return chunks
+
+ def run_all_combos(self, dirs):
+ tests = list(self.generate_tests(dirs))
+ runtimes = self.get_runtimes(tests)
+
+ for total in range(1, len(dirs) + 1):
+ chunks = []
+ for this in range(1, total + 1):
+ f = chunk_by_runtime(this, total, runtimes)
+ ret = list(f(tests, {}))
+ chunks.append(ret)
+
+ # chunk_by_runtime will mess up order, but chained chunks should
+ # contain all of the original tests and be the same length
+ all_chunks = list(chain.from_iterable(chunks))
+ self.assertEqual(len(all_chunks), len(tests))
+ for t in tests:
+ self.assertIn(t, all_chunks)
+
+ # calculate delta between slowest and fastest chunks
+ def runtime_delta(chunks):
+ totals = []
+ for chunk in chunks:
+ total = sum(runtimes[t['relpath']] for t in chunk
+ if 'disabled' not in t)
+ totals.append(total)
+ return max(totals) - min(totals)
+ delta = runtime_delta(chunks)
+
+ # redo the chunking a second time using a round robin style
+ # algorithm
+            chunks = self.chunk_by_round_robin(tests, total, runtimes)
+
+ # since chunks will never have exactly equal runtimes, it's hard
+ # to tell if they were chunked optimally. Make sure it at least
+ # beats a naive round robin approach.
+ self.assertLessEqual(delta, runtime_delta(chunks))
+
+ def test_chunk_by_runtime(self):
+ random.seed(42)
+
+ chunk = chunk_by_runtime(1, 1, {})
+ self.assertEqual(list(chunk([], {})), [])
+
+ dirs = {
+ 'a': 2,
+ }
+ self.run_all_combos(dirs)
+
+ dirs = {
+ '': 1,
+ 'foo': 1,
+ 'bar': 0,
+ '/foobar': 1,
+ }
+ self.run_all_combos(dirs)
+
+ dirs = {
+ 'a': 1,
+ 'b': 1,
+ 'a/b': 2,
+ 'a/c': 1,
+ }
+ self.run_all_combos(dirs)
+
+ dirs = {
+ 'a': 5,
+ 'a/b': 4,
+ 'a/b/c': 7,
+ 'a/b/c/d': 1,
+ 'a/b/c/e': 3,
+ 'b/c': 2,
+ 'b/d': 5,
+ 'b/d/e': 6,
+ 'c': 8,
+ 'c/d/e/f/g/h/i/j/k/l': 5,
+ 'c/d/e/f/g/i/j/k/l/m/n': 2,
+ 'c/e': 1,
+ }
+ self.run_all_combos(dirs)
diff --git a/testing/mozbase/manifestparser/tests/test_convert_directory.py b/testing/mozbase/manifestparser/tests/test_convert_directory.py
new file mode 100755
index 000000000..12776e4e4
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_convert_directory.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+import unittest
+
+from manifestparser import convert
+from manifestparser import ManifestParser
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+# In some cases tempfile.mkdtemp() may return a path which contains
+# symlinks. Some tests here will then break, as the manifestparser.convert
+# function returns paths that do not contain symlinks.
+#
+# Workaround is to use the following function, if absolute path of temp dir
+# must be compared.
+
+
+def create_realpath_tempdir():
+ """
+ Create a tempdir without symlinks.
+ """
+ return os.path.realpath(tempfile.mkdtemp())
+
+
+class TestDirectoryConversion(unittest.TestCase):
+ """test conversion of a directory tree to a manifest structure"""
+
+ def create_stub(self, directory=None):
+ """stub out a directory with files in it"""
+
+ files = ('foo', 'bar', 'fleem')
+ if directory is None:
+ directory = create_realpath_tempdir()
+ for i in files:
+ file(os.path.join(directory, i), 'w').write(i)
+ subdir = os.path.join(directory, 'subdir')
+ os.mkdir(subdir)
+ file(os.path.join(subdir, 'subfile'), 'w').write('baz')
+ return directory
+
+ def test_directory_to_manifest(self):
+ """
+ Test our ability to convert a static directory structure to a
+ manifest.
+ """
+
+ # create a stub directory
+ stub = self.create_stub()
+ try:
+ stub = stub.replace(os.path.sep, "/")
+ self.assertTrue(os.path.exists(stub) and os.path.isdir(stub))
+
+ # Make a manifest for it
+ manifest = convert([stub])
+ out_tmpl = """[%(stub)s/bar]
+
+[%(stub)s/fleem]
+
+[%(stub)s/foo]
+
+[%(stub)s/subdir/subfile]
+
+""" # noqa
+ self.assertEqual(str(manifest), out_tmpl % dict(stub=stub))
+ except:
+ raise
+ finally:
+ shutil.rmtree(stub) # cleanup
+
+ def test_convert_directory_manifests_in_place(self):
+ """
+ keep the manifests in place
+ """
+
+ stub = self.create_stub()
+ try:
+ ManifestParser.populate_directory_manifests([stub], filename='manifest.ini')
+ self.assertEqual(sorted(os.listdir(stub)),
+ ['bar', 'fleem', 'foo', 'manifest.ini', 'subdir'])
+ parser = ManifestParser()
+ parser.read(os.path.join(stub, 'manifest.ini'))
+ self.assertEqual([i['name'] for i in parser.tests],
+ ['subfile', 'bar', 'fleem', 'foo'])
+ parser = ManifestParser()
+ parser.read(os.path.join(stub, 'subdir', 'manifest.ini'))
+ self.assertEqual(len(parser.tests), 1)
+ self.assertEqual(parser.tests[0]['name'], 'subfile')
+ except:
+ raise
+ finally:
+ shutil.rmtree(stub)
+
+ def test_manifest_ignore(self):
+ """test manifest `ignore` parameter for ignoring directories"""
+
+ stub = self.create_stub()
+ try:
+ ManifestParser.populate_directory_manifests(
+ [stub], filename='manifest.ini', ignore=('subdir',))
+ parser = ManifestParser()
+ parser.read(os.path.join(stub, 'manifest.ini'))
+ self.assertEqual([i['name'] for i in parser.tests],
+ ['bar', 'fleem', 'foo'])
+ self.assertFalse(os.path.exists(os.path.join(stub, 'subdir', 'manifest.ini')))
+ except:
+ raise
+ finally:
+ shutil.rmtree(stub)
+
+ def test_pattern(self):
+ """test directory -> manifest with a file pattern"""
+
+ stub = self.create_stub()
+ try:
+ parser = convert([stub], pattern='f*', relative_to=stub)
+ self.assertEqual([i['name'] for i in parser.tests],
+ ['fleem', 'foo'])
+
+ # test multiple patterns
+ parser = convert([stub], pattern=('f*', 's*'), relative_to=stub)
+ self.assertEqual([i['name'] for i in parser.tests],
+ ['fleem', 'foo', 'subdir/subfile'])
+ except:
+ raise
+ finally:
+ shutil.rmtree(stub)
+
+ def test_update(self):
+ """
+ Test our ability to update tests from a manifest and a directory of
+ files
+ """
+
+ # boilerplate
+ tempdir = create_realpath_tempdir()
+ for i in range(10):
+ file(os.path.join(tempdir, str(i)), 'w').write(str(i))
+
+ # otherwise empty directory with a manifest file
+ newtempdir = create_realpath_tempdir()
+ manifest_file = os.path.join(newtempdir, 'manifest.ini')
+ manifest_contents = str(convert([tempdir], relative_to=tempdir))
+ with file(manifest_file, 'w') as f:
+ f.write(manifest_contents)
+
+ # get the manifest
+ manifest = ManifestParser(manifests=(manifest_file,))
+
+ # All of the tests are initially missing:
+ paths = [str(i) for i in range(10)]
+ self.assertEqual([i['name'] for i in manifest.missing()],
+ paths)
+
+ # But then we copy one over:
+ self.assertEqual(manifest.get('name', name='1'), ['1'])
+ manifest.update(tempdir, name='1')
+ self.assertEqual(sorted(os.listdir(newtempdir)),
+ ['1', 'manifest.ini'])
+
+ # Update that one file and copy all the "tests":
+ file(os.path.join(tempdir, '1'), 'w').write('secret door')
+ manifest.update(tempdir)
+ self.assertEqual(sorted(os.listdir(newtempdir)),
+ ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'manifest.ini'])
+ self.assertEqual(file(os.path.join(newtempdir, '1')).read().strip(),
+ 'secret door')
+
+ # clean up:
+ shutil.rmtree(tempdir)
+ shutil.rmtree(newtempdir)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/test_convert_symlinks.py b/testing/mozbase/manifestparser/tests/test_convert_symlinks.py
new file mode 100755
index 000000000..9a0640b4b
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_convert_symlinks.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+import unittest
+
+from manifestparser import convert, ManifestParser
+
+
+class TestSymlinkConversion(unittest.TestCase):
+ """
+ test conversion of a directory tree with symlinks to a manifest structure
+ """
+
+ def create_stub(self, directory=None):
+ """stub out a directory with files in it"""
+
+ files = ('foo', 'bar', 'fleem')
+ if directory is None:
+ directory = tempfile.mkdtemp()
+ for i in files:
+ file(os.path.join(directory, i), 'w').write(i)
+ subdir = os.path.join(directory, 'subdir')
+ os.mkdir(subdir)
+ file(os.path.join(subdir, 'subfile'), 'w').write('baz')
+ return directory
+
+ def test_relpath(self):
+ """test convert `relative_to` functionality"""
+
+ oldcwd = os.getcwd()
+ stub = self.create_stub()
+ try:
+ # subdir with in-memory manifest
+ files = ['../bar', '../fleem', '../foo', 'subfile']
+ subdir = os.path.join(stub, 'subdir')
+ os.chdir(subdir)
+ parser = convert([stub], relative_to='.')
+ self.assertEqual([i['name'] for i in parser.tests],
+ files)
+ except:
+ raise
+ finally:
+ shutil.rmtree(stub)
+ os.chdir(oldcwd)
+
+ @unittest.skipIf(not hasattr(os, 'symlink'),
+ "symlinks unavailable on this platform")
+ def test_relpath_symlink(self):
+ """
+ Ensure `relative_to` works in a symlink.
+ Not available on windows.
+ """
+
+ oldcwd = os.getcwd()
+ workspace = tempfile.mkdtemp()
+ try:
+ tmpdir = os.path.join(workspace, 'directory')
+ os.makedirs(tmpdir)
+ linkdir = os.path.join(workspace, 'link')
+ os.symlink(tmpdir, linkdir)
+ self.create_stub(tmpdir)
+
+ # subdir with in-memory manifest
+ files = ['../bar', '../fleem', '../foo', 'subfile']
+ subdir = os.path.join(linkdir, 'subdir')
+ os.chdir(os.path.realpath(subdir))
+ for directory in (tmpdir, linkdir):
+ parser = convert([directory], relative_to='.')
+ self.assertEqual([i['name'] for i in parser.tests],
+ files)
+ finally:
+ shutil.rmtree(workspace)
+ os.chdir(oldcwd)
+
+ # a more complicated example
+ oldcwd = os.getcwd()
+ workspace = tempfile.mkdtemp()
+ try:
+ tmpdir = os.path.join(workspace, 'directory')
+ os.makedirs(tmpdir)
+ linkdir = os.path.join(workspace, 'link')
+ os.symlink(tmpdir, linkdir)
+ self.create_stub(tmpdir)
+ files = ['../bar', '../fleem', '../foo', 'subfile']
+ subdir = os.path.join(linkdir, 'subdir')
+ subsubdir = os.path.join(subdir, 'sub')
+ os.makedirs(subsubdir)
+ linksubdir = os.path.join(linkdir, 'linky')
+ linksubsubdir = os.path.join(subsubdir, 'linky')
+ os.symlink(subdir, linksubdir)
+ os.symlink(subdir, linksubsubdir)
+ for dest in (subdir,):
+ os.chdir(dest)
+ for directory in (tmpdir, linkdir):
+ parser = convert([directory], relative_to='.')
+ self.assertEqual([i['name'] for i in parser.tests],
+ files)
+ finally:
+ shutil.rmtree(workspace)
+ os.chdir(oldcwd)
+
+ @unittest.skipIf(not hasattr(os, 'symlink'),
+ "symlinks unavailable on this platform")
+ def test_recursion_symlinks(self):
+ workspace = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, workspace)
+
+ # create two dirs
+ os.makedirs(os.path.join(workspace, 'dir1'))
+ os.makedirs(os.path.join(workspace, 'dir2'))
+
+ # create cyclical symlinks
+ os.symlink(os.path.join('..', 'dir1'),
+ os.path.join(workspace, 'dir2', 'ldir1'))
+ os.symlink(os.path.join('..', 'dir2'),
+ os.path.join(workspace, 'dir1', 'ldir2'))
+
+ # create one file in each dir
+ open(os.path.join(workspace, 'dir1', 'f1.txt'), 'a').close()
+ open(os.path.join(workspace, 'dir1', 'ldir2', 'f2.txt'), 'a').close()
+
+ data = []
+
+ def callback(rootdirectory, directory, subdirs, files):
+ for f in files:
+ data.append(f)
+
+ ManifestParser._walk_directories([workspace], callback)
+ self.assertEqual(sorted(data), ['f1.txt', 'f2.txt'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/test_default_overrides.py b/testing/mozbase/manifestparser/tests/test_default_overrides.py
new file mode 100755
index 000000000..3341c4bd8
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_default_overrides.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from manifestparser import ManifestParser
+from manifestparser import combine_fields
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestDefaultSkipif(unittest.TestCase):
+ """Tests applying a skip-if condition in [DEFAULT] and || with the value for the test"""
+
+ def test_defaults(self):
+
+ default = os.path.join(here, 'default-skipif.ini')
+ parser = ManifestParser(manifests=(default,))
+ for test in parser.tests:
+ if test['name'] == 'test1':
+ self.assertEqual(test['skip-if'], "(os == 'win' && debug ) || (debug)")
+ elif test['name'] == 'test2':
+ self.assertEqual(test['skip-if'], "(os == 'win' && debug ) || (os == 'linux')")
+ elif test['name'] == 'test3':
+ self.assertEqual(test['skip-if'], "(os == 'win' && debug ) || (os == 'win')")
+ elif test['name'] == 'test4':
+ self.assertEqual(
+ test['skip-if'], "(os == 'win' && debug ) || (os == 'win' && debug)")
+ elif test['name'] == 'test5':
+ self.assertEqual(test['skip-if'], "os == 'win' && debug # a pesky comment")
+ elif test['name'] == 'test6':
+ self.assertEqual(test['skip-if'], "(os == 'win' && debug ) || (debug )")
+
+
+class TestDefaultSupportFiles(unittest.TestCase):
+ """Tests combining support-files field in [DEFAULT] with the value for a test"""
+
+ def test_defaults(self):
+
+ default = os.path.join(here, 'default-suppfiles.ini')
+ parser = ManifestParser(manifests=(default,))
+ expected_supp_files = {
+ 'test7': 'foo.js # a comment',
+ 'test8': 'foo.js bar.js ',
+ 'test9': 'foo.js # a comment',
+ }
+ for test in parser.tests:
+ expected = expected_supp_files[test['name']]
+ self.assertEqual(test['support-files'], expected)
+
+
+class TestOmitDefaults(unittest.TestCase):
+ """Tests passing omit-defaults prevents defaults from propagating to definitions.
+ """
+
+ def test_defaults(self):
+ manifests = (os.path.join(here, 'default-suppfiles.ini'),
+ os.path.join(here, 'default-skipif.ini'))
+ parser = ManifestParser(manifests=manifests, handle_defaults=False)
+ expected_supp_files = {
+ 'test8': 'bar.js # another comment',
+ }
+ expected_skip_ifs = {
+ 'test1': "debug",
+ 'test2': "os == 'linux'",
+ 'test3': "os == 'win'",
+ 'test4': "os == 'win' && debug",
+ 'test6': "debug # a second pesky comment",
+ }
+ for test in parser.tests:
+ for field, expectations in (('support-files', expected_supp_files),
+ ('skip-if', expected_skip_ifs)):
+ expected = expectations.get(test['name'])
+ if not expected:
+ self.assertNotIn(field, test)
+ else:
+ self.assertEqual(test[field], expected)
+
+ expected_defaults = {
+ os.path.join(here, 'default-suppfiles.ini'): {
+ "support-files": "foo.js # a comment",
+ },
+ os.path.join(here, 'default-skipif.ini'): {
+ "skip-if": "os == 'win' && debug # a pesky comment",
+ },
+ }
+ for path, defaults in expected_defaults.items():
+ self.assertIn(path, parser.manifest_defaults)
+ actual_defaults = parser.manifest_defaults[path]
+ for key, value in defaults.items():
+ self.assertIn(key, actual_defaults)
+ self.assertEqual(value, actual_defaults[key])
+
+
+class TestSubsuiteDefaults(unittest.TestCase):
+ """Test that subsuites are handled correctly when managing defaults
+ outside of the manifest parser."""
+ def test_subsuite_defaults(self):
+ manifest = os.path.join(here, 'default-subsuite.ini')
+ parser = ManifestParser(manifests=(manifest,), handle_defaults=False)
+ expected_subsuites = {
+ 'test1': 'baz',
+ 'test2': 'foo',
+ }
+ defaults = parser.manifest_defaults[manifest]
+ for test in parser.tests:
+ value = combine_fields(defaults, test)
+ self.assertEqual(expected_subsuites[value['name']],
+ value['subsuite'])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/test_expressionparser.py b/testing/mozbase/manifestparser/tests/test_expressionparser.py
new file mode 100755
index 000000000..dc3f2fd3d
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_expressionparser.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+
+import unittest
+from manifestparser import parse
+
+
+class ExpressionParserTest(unittest.TestCase):
+ """Test the conditional expression parser."""
+
+ def test_basic(self):
+
+ self.assertEqual(parse("1"), 1)
+ self.assertEqual(parse("100"), 100)
+ self.assertEqual(parse("true"), True)
+ self.assertEqual(parse("false"), False)
+ self.assertEqual('', parse('""'))
+ self.assertEqual(parse('"foo bar"'), 'foo bar')
+ self.assertEqual(parse("'foo bar'"), 'foo bar')
+ self.assertEqual(parse("foo", foo=1), 1)
+ self.assertEqual(parse("bar", bar=True), True)
+ self.assertEqual(parse("abc123", abc123="xyz"), 'xyz')
+
+ def test_equality(self):
+
+ self.assertTrue(parse("true == true"))
+ self.assertTrue(parse("false == false"))
+ self.assertTrue(parse("1 == 1"))
+ self.assertTrue(parse("100 == 100"))
+ self.assertTrue(parse('"some text" == "some text"'))
+ self.assertTrue(parse("true != false"))
+ self.assertTrue(parse("1 != 2"))
+ self.assertTrue(parse('"text" != "other text"'))
+ self.assertTrue(parse("foo == true", foo=True))
+ self.assertTrue(parse("foo == 1", foo=1))
+ self.assertTrue(parse('foo == "bar"', foo='bar'))
+ self.assertTrue(parse("foo == bar", foo=True, bar=True))
+ self.assertTrue(parse("true == foo", foo=True))
+ self.assertTrue(parse("foo != true", foo=False))
+ self.assertTrue(parse("foo != 2", foo=1))
+ self.assertTrue(parse('foo != "bar"', foo='abc'))
+ self.assertTrue(parse("foo != bar", foo=True, bar=False))
+ self.assertTrue(parse("true != foo", foo=False))
+ self.assertTrue(parse("!false"))
+
+ def test_conjunctures(self):
+ self.assertTrue(parse("true && true"))
+ self.assertTrue(parse("true || false"))
+ self.assertFalse(parse("false || false"))
+ self.assertFalse(parse("true && false"))
+ self.assertTrue(parse("true || false && false"))
+
+ def test_parentheses(self):
+ self.assertTrue(parse("(true)"))
+ self.assertEqual(parse("(10)"), 10)
+ self.assertEqual(parse('("foo")'), 'foo')
+ self.assertEqual(parse("(foo)", foo=1), 1)
+ self.assertTrue(parse("(true == true)"), True)
+ self.assertTrue(parse("(true != false)"))
+ self.assertTrue(parse("(true && true)"))
+ self.assertTrue(parse("(true || false)"))
+ self.assertTrue(parse("(true && true || false)"))
+ self.assertFalse(parse("(true || false) && false"))
+ self.assertTrue(parse("(true || false) && true"))
+ self.assertTrue(parse("true && (true || false)"))
+ self.assertTrue(parse("true && (true || false)"))
+ self.assertTrue(parse("(true && false) || (true && (true || false))"))
+
+ def test_comments(self):
+        # comments in expressions work accidentally, via an implementation
+        # detail - the '#' character doesn't match any of the regular
+        # expressions we specify as tokens, and is thus ignored.
+        # However, having explicit tests for them means that should the
+        # implementation ever change, comments will continue to work, even
+        # if that means a new implementation must handle them explicitly.
+ self.assertTrue(parse("true == true # it does!"))
+ self.assertTrue(parse("false == false # it does"))
+ self.assertTrue(parse("false != true # it doesnt"))
+ self.assertTrue(parse('"string with #" == "string with #" # really, it does'))
+ self.assertTrue(parse('"string with #" != "string with # but not the same" # no match!'))
+
+ def test_not(self):
+ """
+ Test the ! operator.
+ """
+ self.assertTrue(parse("!false"))
+ self.assertTrue(parse("!(false)"))
+ self.assertFalse(parse("!true"))
+ self.assertFalse(parse("!(true)"))
+ self.assertTrue(parse("!true || true)"))
+ self.assertTrue(parse("true || !true)"))
+ self.assertFalse(parse("!true && true"))
+ self.assertFalse(parse("true && !true"))
+
+ def test_lesser_than(self):
+ """
+ Test the < operator.
+ """
+ self.assertTrue(parse("1 < 2"))
+ self.assertFalse(parse("3 < 2"))
+ self.assertTrue(parse("false || (1 < 2)"))
+ self.assertTrue(parse("1 < 2 && true"))
+ self.assertTrue(parse("true && 1 < 2"))
+ self.assertTrue(parse("!(5 < 1)"))
+ self.assertTrue(parse("'abc' < 'def'"))
+ self.assertFalse(parse("1 < 1"))
+ self.assertFalse(parse("'abc' < 'abc'"))
+
+ def test_greater_than(self):
+ """
+ Test the > operator.
+ """
+ self.assertTrue(parse("2 > 1"))
+ self.assertFalse(parse("2 > 3"))
+ self.assertTrue(parse("false || (2 > 1)"))
+ self.assertTrue(parse("2 > 1 && true"))
+ self.assertTrue(parse("true && 2 > 1"))
+ self.assertTrue(parse("!(1 > 5)"))
+ self.assertTrue(parse("'def' > 'abc'"))
+ self.assertFalse(parse("1 > 1"))
+ self.assertFalse(parse("'abc' > 'abc'"))
+
+ def test_lesser_or_equals_than(self):
+ """
+ Test the <= operator.
+ """
+ self.assertTrue(parse("1 <= 2"))
+ self.assertFalse(parse("3 <= 2"))
+ self.assertTrue(parse("false || (1 <= 2)"))
+ self.assertTrue(parse("1 < 2 && true"))
+ self.assertTrue(parse("true && 1 <= 2"))
+ self.assertTrue(parse("!(5 <= 1)"))
+ self.assertTrue(parse("'abc' <= 'def'"))
+ self.assertTrue(parse("1 <= 1"))
+ self.assertTrue(parse("'abc' <= 'abc'"))
+
+ def test_greater_or_equals_than(self):
+ """
+        Test the >= operator.
+ """
+ self.assertTrue(parse("2 >= 1"))
+ self.assertFalse(parse("2 >= 3"))
+ self.assertTrue(parse("false || (2 >= 1)"))
+ self.assertTrue(parse("2 >= 1 && true"))
+ self.assertTrue(parse("true && 2 >= 1"))
+ self.assertTrue(parse("!(1 >= 5)"))
+ self.assertTrue(parse("'def' >= 'abc'"))
+ self.assertTrue(parse("1 >= 1"))
+ self.assertTrue(parse("'abc' >= 'abc'"))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/test_filters.py b/testing/mozbase/manifestparser/tests/test_filters.py
new file mode 100644
index 000000000..5b0772492
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_filters.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+# flake8: noqa
+
+from copy import deepcopy
+import os
+import unittest
+
+from manifestparser.filters import (
+ subsuite,
+ tags,
+ skip_if,
+ run_if,
+ fail_if,
+ enabled,
+ filterlist,
+)
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class FilterList(unittest.TestCase):
+ """Test filterlist datatype"""
+
+ def test_data_model(self):
+ foo = lambda x, y: x
+ bar = lambda x, y: x
+ baz = lambda x, y: x
+ fl = filterlist()
+
+ fl.extend([foo, bar])
+ self.assertEquals(len(fl), 2)
+ self.assertTrue(foo in fl)
+
+ fl.append(baz)
+ self.assertEquals(fl[2], baz)
+
+ fl.remove(baz)
+ self.assertFalse(baz in fl)
+
+ item = fl.pop()
+ self.assertEquals(item, bar)
+
+ self.assertEquals(fl.index(foo), 0)
+
+ del fl[0]
+ self.assertFalse(foo in fl)
+ with self.assertRaises(IndexError):
+ fl[0]
+
+ def test_add_non_callable_to_list(self):
+ fl = filterlist()
+ with self.assertRaises(TypeError):
+ fl.append('foo')
+
+ def test_add_duplicates_to_list(self):
+ foo = lambda x, y: x
+ bar = lambda x, y: x
+ sub = subsuite('foo')
+ fl = filterlist([foo, bar, sub])
+ self.assertEquals(len(fl), 3)
+ self.assertEquals(fl[0], foo)
+
+ with self.assertRaises(ValueError):
+ fl.append(foo)
+
+ with self.assertRaises(ValueError):
+ fl.append(subsuite('bar'))
+
+ def test_add_two_tags_filters(self):
+ tag1 = tags('foo')
+ tag2 = tags('bar')
+ fl = filterlist([tag1])
+
+ with self.assertRaises(ValueError):
+ fl.append(tag1)
+
+ fl.append(tag2)
+ self.assertEquals(len(fl), 2)
+
+ def test_filters_run_in_order(self):
+ a = lambda x, y: x
+ b = lambda x, y: x
+ c = lambda x, y: x
+ d = lambda x, y: x
+ e = lambda x, y: x
+ f = lambda x, y: x
+
+ fl = filterlist([a, b])
+ fl.append(c)
+ fl.extend([d, e])
+ fl += [f]
+ self.assertEquals([i for i in fl], [a, b, c, d, e, f])
+
+
+class BuiltinFilters(unittest.TestCase):
+ """Test the built-in filters"""
+
+ tests = (
+ {"name": "test0"},
+ {"name": "test1", "skip-if": "foo == 'bar'"},
+ {"name": "test2", "run-if": "foo == 'bar'"},
+ {"name": "test3", "fail-if": "foo == 'bar'"},
+ {"name": "test4", "disabled": "some reason"},
+ {"name": "test5", "subsuite": "baz"},
+ {"name": "test6", "subsuite": "baz,foo == 'bar'"},
+ {"name": "test7", "tags": "foo bar"},
+ )
+
+ def test_skip_if(self):
+ tests = deepcopy(self.tests)
+ tests = list(skip_if(tests, {}))
+ self.assertEquals(len(tests), len(self.tests))
+
+ tests = deepcopy(self.tests)
+ tests = list(skip_if(tests, {'foo': 'bar'}))
+ self.assertNotIn(self.tests[1], tests)
+
+ def test_run_if(self):
+ tests = deepcopy(self.tests)
+ tests = list(run_if(tests, {}))
+ self.assertNotIn(self.tests[2], tests)
+
+ tests = deepcopy(self.tests)
+ tests = list(run_if(tests, {'foo': 'bar'}))
+ self.assertEquals(len(tests), len(self.tests))
+
+ def test_fail_if(self):
+ tests = deepcopy(self.tests)
+ tests = list(fail_if(tests, {}))
+ self.assertNotIn('expected', tests[3])
+
+ tests = deepcopy(self.tests)
+ tests = list(fail_if(tests, {'foo': 'bar'}))
+ self.assertEquals(tests[3]['expected'], 'fail')
+
+ def test_enabled(self):
+ tests = deepcopy(self.tests)
+ tests = list(enabled(tests, {}))
+ self.assertNotIn(self.tests[4], tests)
+
+ def test_subsuite(self):
+ sub1 = subsuite()
+ sub2 = subsuite('baz')
+
+ tests = deepcopy(self.tests)
+ tests = list(sub1(tests, {}))
+ self.assertNotIn(self.tests[5], tests)
+ self.assertEquals(len(tests), len(self.tests) - 1)
+
+ tests = deepcopy(self.tests)
+ tests = list(sub2(tests, {}))
+ self.assertEquals(len(tests), 1)
+ self.assertIn(self.tests[5], tests)
+
+ def test_subsuite_condition(self):
+ sub1 = subsuite()
+ sub2 = subsuite('baz')
+
+ tests = deepcopy(self.tests)
+
+ tests = list(sub1(tests, {'foo': 'bar'}))
+ self.assertNotIn(self.tests[5], tests)
+ self.assertNotIn(self.tests[6], tests)
+
+ tests = deepcopy(self.tests)
+ tests = list(sub2(tests, {'foo': 'bar'}))
+ self.assertEquals(len(tests), 2)
+ self.assertEquals(tests[0]['name'], 'test5')
+ self.assertEquals(tests[1]['name'], 'test6')
+
+ def test_tags(self):
+ ftags1 = tags([])
+ ftags2 = tags(['bar', 'baz'])
+
+ tests = deepcopy(self.tests)
+ tests = list(ftags1(tests, {}))
+ self.assertEquals(len(tests), 0)
+
+ tests = deepcopy(self.tests)
+ tests = list(ftags2(tests, {}))
+ self.assertEquals(len(tests), 1)
+ self.assertIn(self.tests[7], tests)
diff --git a/testing/mozbase/manifestparser/tests/test_manifestparser.py b/testing/mozbase/manifestparser/tests/test_manifestparser.py
new file mode 100755
index 000000000..ca80911fb
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_manifestparser.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+import unittest
+from manifestparser import ManifestParser
+from StringIO import StringIO
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestManifestParser(unittest.TestCase):
+ """
+ Test the manifest parser
+
+ You must have manifestparser installed before running these tests.
+    Run ``python setup.py develop`` with setuptools installed.
+ """
+
+ def test_sanity(self):
+ """Ensure basic parser is sane"""
+
+ parser = ManifestParser()
+ mozmill_example = os.path.join(here, 'mozmill-example.ini')
+ parser.read(mozmill_example)
+ tests = parser.tests
+ self.assertEqual(len(tests), len(file(mozmill_example).read().strip().splitlines()))
+
+ # Ensure that capitalization and order aren't an issue:
+ lines = ['[%s]' % test['name'] for test in tests]
+ self.assertEqual(lines, file(mozmill_example).read().strip().splitlines())
+
+ # Show how you select subsets of tests:
+ mozmill_restart_example = os.path.join(here, 'mozmill-restart-example.ini')
+ parser.read(mozmill_restart_example)
+ restart_tests = parser.get(type='restart')
+ self.assertTrue(len(restart_tests) < len(parser.tests))
+ self.assertEqual(len(restart_tests), len(parser.get(manifest=mozmill_restart_example)))
+ self.assertFalse([test for test in restart_tests
+ if test['manifest'] != os.path.join(here,
+ 'mozmill-restart-example.ini')])
+ self.assertEqual(parser.get('name', tags=['foo']),
+ ['restartTests/testExtensionInstallUninstall/test2.js',
+ 'restartTests/testExtensionInstallUninstall/test1.js'])
+ self.assertEqual(parser.get('name', foo='bar'),
+ ['restartTests/testExtensionInstallUninstall/test2.js'])
+
+ def test_include(self):
+ """Illustrate how include works"""
+
+ include_example = os.path.join(here, 'include-example.ini')
+ parser = ManifestParser(manifests=(include_example,))
+
+ # All of the tests should be included, in order:
+ self.assertEqual(parser.get('name'),
+ ['crash-handling', 'fleem', 'flowers'])
+ self.assertEqual([(test['name'], os.path.basename(test['manifest']))
+ for test in parser.tests],
+ [('crash-handling', 'bar.ini'),
+ ('fleem', 'include-example.ini'),
+ ('flowers', 'foo.ini')])
+
+ # The including manifest is always reported as a part of the generated test object.
+ self.assertTrue(all([t['ancestor-manifest'] == include_example
+ for t in parser.tests if t['name'] != 'fleem']))
+
+ # The manifests should be there too:
+ self.assertEqual(len(parser.manifests()), 3)
+
+ # We already have the root directory:
+ self.assertEqual(here, parser.rootdir)
+
+ # DEFAULT values should persist across includes, unless they're
+ # overwritten. In this example, include-example.ini sets foo=bar, but
+ # it's overridden to fleem in bar.ini
+ self.assertEqual(parser.get('name', foo='bar'),
+ ['fleem', 'flowers'])
+ self.assertEqual(parser.get('name', foo='fleem'),
+ ['crash-handling'])
+
+ # Passing parameters in the include section allows defining variables in
+ # the submodule scope:
+ self.assertEqual(parser.get('name', tags=['red']),
+ ['flowers'])
+
+        # However, this should be overridable from the DEFAULT section in the
+        # included file, and that in turn overridable via the key attached
+        # directly to the test:
+ self.assertEqual(parser.get(name='flowers')[0]['blue'],
+ 'ocean')
+ self.assertEqual(parser.get(name='flowers')[0]['yellow'],
+ 'submarine')
+
+ # You can query multiple times if you need to:
+ flowers = parser.get(foo='bar')
+ self.assertEqual(len(flowers), 2)
+
+ # Using the inverse flag should invert the set of tests returned:
+ self.assertEqual(parser.get('name', inverse=True, tags=['red']),
+ ['crash-handling', 'fleem'])
+
+ # All of the included tests actually exist:
+ self.assertEqual([i['name'] for i in parser.missing()], [])
+
+ # Write the output to a manifest:
+ buffer = StringIO()
+ parser.write(fp=buffer, global_kwargs={'foo': 'bar'})
+ expected_output = """[DEFAULT]
+foo = bar
+
+[fleem]
+
+[include/flowers]
+blue = ocean
+red = roses
+yellow = submarine""" # noqa
+
+ self.assertEqual(buffer.getvalue().strip(),
+ expected_output)
+
+ def test_invalid_path(self):
+ """
+ Test invalid path should not throw when not strict
+ """
+ manifest = os.path.join(here, 'include-invalid.ini')
+ ManifestParser(manifests=(manifest,), strict=False)
+
+ def test_parent_inheritance(self):
+ """
+ Test parent manifest variable inheritance
+ Specifically tests that inherited variables from parent includes
+ properly propagate downstream
+ """
+ parent_example = os.path.join(here, 'parent', 'level_1', 'level_2',
+ 'level_3', 'level_3.ini')
+ parser = ManifestParser(manifests=(parent_example,))
+
+ # Parent manifest test should not be included
+ self.assertEqual(parser.get('name'),
+ ['test_3'])
+ self.assertEqual([(test['name'], os.path.basename(test['manifest']))
+ for test in parser.tests],
+ [('test_3', 'level_3.ini')])
+
+ # DEFAULT values should be the ones from level 1
+ self.assertEqual(parser.get('name', x='level_1'),
+ ['test_3'])
+
+ # Write the output to a manifest:
+ buffer = StringIO()
+ parser.write(fp=buffer, global_kwargs={'x': 'level_1'})
+ self.assertEqual(buffer.getvalue().strip(),
+ '[DEFAULT]\nx = level_1\n\n[test_3]')
+
+ def test_parent_defaults(self):
+ """
+ Test downstream variables should overwrite upstream variables
+ """
+ parent_example = os.path.join(here, 'parent', 'level_1', 'level_2',
+ 'level_3', 'level_3_default.ini')
+ parser = ManifestParser(manifests=(parent_example,))
+
+ # Parent manifest test should not be included
+ self.assertEqual(parser.get('name'),
+ ['test_3'])
+ self.assertEqual([(test['name'], os.path.basename(test['manifest']))
+ for test in parser.tests],
+ [('test_3', 'level_3_default.ini')])
+
+ # DEFAULT values should be the ones from level 3
+ self.assertEqual(parser.get('name', x='level_3'),
+ ['test_3'])
+
+ # Write the output to a manifest:
+ buffer = StringIO()
+ parser.write(fp=buffer, global_kwargs={'x': 'level_3'})
+ self.assertEqual(buffer.getvalue().strip(),
+ '[DEFAULT]\nx = level_3\n\n[test_3]')
+
+ def test_parent_defaults_include(self):
+ parent_example = os.path.join(here, 'parent', 'include', 'manifest.ini')
+ parser = ManifestParser(manifests=(parent_example,))
+
+ # global defaults should inherit all includes
+ self.assertEqual(parser.get('name', top='data'),
+ ['testFirst.js', 'testSecond.js'])
+
+ # include specific defaults should only inherit the actual include
+ self.assertEqual(parser.get('name', disabled='YES'),
+ ['testFirst.js'])
+ self.assertEqual(parser.get('name', disabled='NO'),
+ ['testSecond.js'])
+
+ def test_server_root(self):
+ """
+ Test server_root properly expands as an absolute path
+ """
+ server_example = os.path.join(here, 'parent', 'level_1', 'level_2',
+ 'level_3', 'level_3_server-root.ini')
+ parser = ManifestParser(manifests=(server_example,))
+
+ # A regular variable will inherit its value directly
+ self.assertEqual(parser.get('name', **{'other-root': '../root'}),
+ ['test_3'])
+
+ # server-root will expand its value as an absolute path
+ # we will not find anything for the original value
+ self.assertEqual(parser.get('name', **{'server-root': '../root'}), [])
+
+ # check that the path has expanded
+ self.assertEqual(parser.get('server-root')[0],
+ os.path.join(here, 'parent', 'root'))
+
+ def test_copy(self):
+ """Test our ability to copy a set of manifests"""
+
+ tempdir = tempfile.mkdtemp()
+ include_example = os.path.join(here, 'include-example.ini')
+ manifest = ManifestParser(manifests=(include_example,))
+ manifest.copy(tempdir)
+ self.assertEqual(sorted(os.listdir(tempdir)),
+ ['fleem', 'include', 'include-example.ini'])
+ self.assertEqual(sorted(os.listdir(os.path.join(tempdir, 'include'))),
+ ['bar.ini', 'crash-handling', 'flowers', 'foo.ini'])
+ from_manifest = ManifestParser(manifests=(include_example,))
+ to_manifest = os.path.join(tempdir, 'include-example.ini')
+ to_manifest = ManifestParser(manifests=(to_manifest,))
+ self.assertEqual(to_manifest.get('name'), from_manifest.get('name'))
+ shutil.rmtree(tempdir)
+
+ def test_path_override(self):
+ """You can override the path in the section too.
+ This shows that you can use a relative path"""
+ path_example = os.path.join(here, 'path-example.ini')
+ manifest = ManifestParser(manifests=(path_example,))
+ self.assertEqual(manifest.tests[0]['path'],
+ os.path.join(here, 'fleem'))
+
+ def test_relative_path(self):
+ """
+ Relative test paths are correctly calculated.
+ """
+ relative_path = os.path.join(here, 'relative-path.ini')
+ manifest = ManifestParser(manifests=(relative_path,))
+ self.assertEqual(manifest.tests[0]['path'],
+ os.path.join(os.path.dirname(here), 'fleem'))
+ self.assertEqual(manifest.tests[0]['relpath'],
+ os.path.join('..', 'fleem'))
+ self.assertEqual(manifest.tests[1]['relpath'],
+ os.path.join('..', 'testsSIBLING', 'example'))
+
+ def test_path_from_fd(self):
+ """
+ Test paths are left untouched when manifest is a file-like object.
+ """
+ fp = StringIO("[section]\npath=fleem")
+ manifest = ManifestParser(manifests=(fp,))
+ self.assertEqual(manifest.tests[0]['path'], 'fleem')
+ self.assertEqual(manifest.tests[0]['relpath'], 'fleem')
+ self.assertEqual(manifest.tests[0]['manifest'], None)
+
+ def test_comments(self):
+ """
+ ensure comments work, see
+ https://bugzilla.mozilla.org/show_bug.cgi?id=813674
+ """
+ comment_example = os.path.join(here, 'comment-example.ini')
+ manifest = ManifestParser(manifests=(comment_example,))
+ self.assertEqual(len(manifest.tests), 8)
+ names = [i['name'] for i in manifest.tests]
+ self.assertFalse('test_0202_app_launch_apply_update_dirlocked.js' in names)
+
+ def test_verifyDirectory(self):
+
+ directory = os.path.join(here, 'verifyDirectory')
+
+ # correct manifest
+ manifest_path = os.path.join(directory, 'verifyDirectory.ini')
+ manifest = ManifestParser(manifests=(manifest_path,))
+ missing = manifest.verifyDirectory(directory, extensions=('.js',))
+ self.assertEqual(missing, (set(), set()))
+
+ # manifest is missing test_1.js
+ test_1 = os.path.join(directory, 'test_1.js')
+ manifest_path = os.path.join(directory, 'verifyDirectory_incomplete.ini')
+ manifest = ManifestParser(manifests=(manifest_path,))
+ missing = manifest.verifyDirectory(directory, extensions=('.js',))
+ self.assertEqual(missing, (set(), set([test_1])))
+
+ # filesystem is missing test_notappearinginthisfilm.js
+ missing_test = os.path.join(directory, 'test_notappearinginthisfilm.js')
+ manifest_path = os.path.join(directory, 'verifyDirectory_toocomplete.ini')
+ manifest = ManifestParser(manifests=(manifest_path,))
+ missing = manifest.verifyDirectory(directory, extensions=('.js',))
+ self.assertEqual(missing, (set([missing_test]), set()))
+
+ def test_just_defaults(self):
+ """Ensure a manifest with just a DEFAULT section exposes that data."""
+
+ parser = ManifestParser()
+ manifest = os.path.join(here, 'just-defaults.ini')
+ parser.read(manifest)
+ self.assertEqual(len(parser.tests), 0)
+ self.assertTrue(manifest in parser.manifest_defaults)
+ self.assertEquals(parser.manifest_defaults[manifest]['foo'], 'bar')
+
+ def test_manifest_list(self):
+ """
+ Ensure a manifest with just a DEFAULT section still returns
+ itself from the manifests() method.
+ """
+
+ parser = ManifestParser()
+ manifest = os.path.join(here, 'no-tests.ini')
+ parser.read(manifest)
+ self.assertEqual(len(parser.tests), 0)
+ self.assertTrue(len(parser.manifests()) == 1)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/test_read_ini.py b/testing/mozbase/manifestparser/tests/test_read_ini.py
new file mode 100755
index 000000000..df4a8973b
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_read_ini.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+"""
+test .ini parsing
+
+ensure our .ini parser is doing what we want; it is to be deprecated in
+favour of Python's standard ConfigParser once Python 2.7 is the baseline
+and OrderedDict is the default:
+
+http://docs.python.org/2/library/configparser.html
+"""
+
+import unittest
+from manifestparser import read_ini
+from ConfigParser import ConfigParser
+from StringIO import StringIO
+
+
+class IniParserTest(unittest.TestCase):
+
+ def test_inline_comments(self):
+ """
+        We do not support inline comments, so these tests ensure that we don't:
+ https://bugzilla.mozilla.org/show_bug.cgi?id=855288
+ """
+
+ # test '#' inline comments (really, the lack thereof)
+ string = """[test_felinicity.py]
+kittens = true # This test requires kittens
+"""
+ buffer = StringIO()
+ buffer.write(string)
+ buffer.seek(0)
+ result = read_ini(buffer)[0][1]['kittens']
+ self.assertEqual(result, "true # This test requires kittens")
+
+ # compare this to ConfigParser
+        # python 2.7 ConfigParser does not support '#' as an
+        # inline comment delimiter (for "backwards compatibility"):
+ # http://docs.python.org/2/library/configparser.html
+ buffer.seek(0)
+ parser = ConfigParser()
+ parser.readfp(buffer)
+ control = parser.get('test_felinicity.py', 'kittens')
+ self.assertEqual(result, control)
+
+ # test ';' inline comments (really, the lack thereof)
+ string = string.replace('#', ';')
+ buffer = StringIO()
+ buffer.write(string)
+ buffer.seek(0)
+ result = read_ini(buffer)[0][1]['kittens']
+ self.assertEqual(result, "true ; This test requires kittens")
+
+ # compare this to ConfigParser
+ # python 2.7 ConfigParser *does* support ';' as an
+        # inline comment delimiter (ibid).
+ # Python 3.x configparser, OTOH, does not support
+ # inline-comments by default. It does support their specification,
+ # though they are weakly discouraged:
+ # http://docs.python.org/dev/library/configparser.html
+ buffer.seek(0)
+ parser = ConfigParser()
+ parser.readfp(buffer)
+ control = parser.get('test_felinicity.py', 'kittens')
+ self.assertNotEqual(result, control)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/test_testmanifest.py b/testing/mozbase/manifestparser/tests/test_testmanifest.py
new file mode 100644
index 000000000..5f79dd48a
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/test_testmanifest.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+import os
+import shutil
+import tempfile
+import unittest
+
+from manifestparser import TestManifest, ParseError
+from manifestparser.filters import subsuite
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestTestManifest(unittest.TestCase):
+ """Test the Test Manifest"""
+
+ def test_testmanifest(self):
+ # Test filtering based on platform:
+ filter_example = os.path.join(here, 'filter-example.ini')
+ manifest = TestManifest(manifests=(filter_example,), strict=False)
+ self.assertEqual([i['name'] for i in manifest.active_tests(os='win', disabled=False,
+ exists=False)],
+ ['windowstest', 'fleem'])
+ self.assertEqual([i['name'] for i in manifest.active_tests(os='linux', disabled=False,
+ exists=False)],
+ ['fleem', 'linuxtest'])
+
+ # Look for existing tests. There is only one:
+ self.assertEqual([i['name'] for i in manifest.active_tests()],
+ ['fleem'])
+
+ # You should be able to expect failures:
+ last = manifest.active_tests(exists=False, toolkit='gtk2')[-1]
+ self.assertEqual(last['name'], 'linuxtest')
+ self.assertEqual(last['expected'], 'pass')
+ last = manifest.active_tests(exists=False, toolkit='cocoa')[-1]
+ self.assertEqual(last['expected'], 'fail')
+
+ def test_missing_paths(self):
+ """
+ Test paths that don't exist raise an exception in strict mode.
+ """
+ tempdir = tempfile.mkdtemp()
+
+ missing_path = os.path.join(here, 'missing-path.ini')
+ manifest = TestManifest(manifests=(missing_path,), strict=True)
+ self.assertRaises(IOError, manifest.active_tests)
+ self.assertRaises(IOError, manifest.copy, tempdir)
+ self.assertRaises(IOError, manifest.update, tempdir)
+
+ shutil.rmtree(tempdir)
+
+ def test_comments(self):
+ """
+ ensure comments work, see
+ https://bugzilla.mozilla.org/show_bug.cgi?id=813674
+ """
+ comment_example = os.path.join(here, 'comment-example.ini')
+ manifest = TestManifest(manifests=(comment_example,))
+ self.assertEqual(len(manifest.tests), 8)
+ names = [i['name'] for i in manifest.tests]
+ self.assertFalse('test_0202_app_launch_apply_update_dirlocked.js' in names)
+
+ def test_manifest_subsuites(self):
+ """
+ test subsuites and conditional subsuites
+ """
+ relative_path = os.path.join(here, 'subsuite.ini')
+ manifest = TestManifest(manifests=(relative_path,))
+ info = {'foo': 'bar'}
+
+ # 6 tests total
+ tests = manifest.active_tests(exists=False, **info)
+ self.assertEquals(len(tests), 6)
+
+ # only 3 tests for subsuite bar when foo==bar
+ tests = manifest.active_tests(exists=False,
+ filters=[subsuite('bar')],
+ **info)
+ self.assertEquals(len(tests), 3)
+
+ # only 1 test for subsuite baz, regardless of conditions
+ other = {'something': 'else'}
+ tests = manifest.active_tests(exists=False,
+ filters=[subsuite('baz')],
+ **info)
+ self.assertEquals(len(tests), 1)
+ tests = manifest.active_tests(exists=False,
+ filters=[subsuite('baz')],
+ **other)
+ self.assertEquals(len(tests), 1)
+
+        # 5 tests match when the condition doesn't match (all tests except
+        # the unconditional subsuite)
+ info = {'foo': 'blah'}
+ tests = manifest.active_tests(exists=False,
+ filters=[subsuite()],
+ **info)
+ self.assertEquals(len(tests), 5)
+
+ # test for illegal subsuite value
+ manifest.tests[0]['subsuite'] = 'subsuite=bar,foo=="bar",type="nothing"'
+ with self.assertRaises(ParseError):
+ manifest.active_tests(exists=False,
+ filters=[subsuite('foo')],
+ **info)
+
+ def test_none_and_empty_manifest(self):
+ """
+ Test TestManifest for None and empty manifest, see
+ https://bugzilla.mozilla.org/show_bug.cgi?id=1087682
+ """
+ none_manifest = TestManifest(manifests=None, strict=False)
+ self.assertEqual(len(none_manifest.test_paths()), 0)
+ self.assertEqual(len(none_manifest.active_tests()), 0)
+
+ empty_manifest = TestManifest(manifests=[], strict=False)
+ self.assertEqual(len(empty_manifest.test_paths()), 0)
+ self.assertEqual(len(empty_manifest.active_tests()), 0)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini
new file mode 100644
index 000000000..509ebd62e
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini
@@ -0,0 +1 @@
+[test_sub.js]
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js
new file mode 100644
index 000000000..df48720d9
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js
@@ -0,0 +1 @@
+// test_sub.js
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js b/testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js
new file mode 100644
index 000000000..c5a966f46
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js
@@ -0,0 +1 @@
+// test_1.js
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js b/testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js
new file mode 100644
index 000000000..d8648599c
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js
@@ -0,0 +1 @@
+// test_2.js
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js b/testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js
new file mode 100644
index 000000000..794bc2c34
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js
@@ -0,0 +1 @@
+// test_3.js
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini
new file mode 100644
index 000000000..10e0c79c8
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini
@@ -0,0 +1,4 @@
+[test_1.js]
+[test_2.js]
+[test_3.js]
+[include:subdir/manifest.ini]
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini
new file mode 100644
index 000000000..cde526acf
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini
@@ -0,0 +1,3 @@
+[test_2.js]
+[test_3.js]
+[include:subdir/manifest.ini]
diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini
new file mode 100644
index 000000000..88994ae26
--- /dev/null
+++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini
@@ -0,0 +1,5 @@
+[test_1.js]
+[test_2.js]
+[test_3.js]
+[test_notappearinginthisfilm.js]
+[include:subdir/manifest.ini]
diff --git a/testing/mozbase/moz.build b/testing/mozbase/moz.build
new file mode 100644
index 000000000..172f4e728
--- /dev/null
+++ b/testing/mozbase/moz.build
@@ -0,0 +1,38 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PYTHON_UNIT_TESTS += [
+ 'test.py',
+]
+
+python_modules = [
+ 'manifestparser',
+ 'mozcrash',
+ 'mozdebug',
+ 'mozdevice',
+ 'mozfile',
+ 'mozhttpd',
+ 'mozinfo',
+ 'mozinstall',
+ 'mozleak',
+ 'mozlog',
+ 'moznetwork',
+ 'mozprocess',
+ 'mozprofile',
+ 'mozrunner',
+ 'mozscreenshot',
+ 'mozsystemmonitor',
+ 'moztest',
+ 'mozversion',
+]
+
+TEST_HARNESS_FILES.mozbase += [m + '/**' for m in python_modules]
+
+TEST_HARNESS_FILES.mozbase += [
+ 'setup_development.py',
+ 'test-manifest.ini',
+ 'test.py',
+]
diff --git a/testing/mozbase/mozcrash/mozcrash/__init__.py b/testing/mozbase/mozcrash/mozcrash/__init__.py
new file mode 100644
index 000000000..ec95442cf
--- /dev/null
+++ b/testing/mozbase/mozcrash/mozcrash/__init__.py
@@ -0,0 +1,10 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+mozcrash is a library for getting a stack trace out of processes that have crashed
+and left behind a minidump file generated by the Google Breakpad library.
+"""
+
+from mozcrash import *
diff --git a/testing/mozbase/mozcrash/mozcrash/mozcrash.py b/testing/mozbase/mozcrash/mozcrash/mozcrash.py
new file mode 100644
index 000000000..c39e68f3a
--- /dev/null
+++ b/testing/mozbase/mozcrash/mozcrash/mozcrash.py
@@ -0,0 +1,557 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import glob
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import urllib2
+import zipfile
+from collections import namedtuple
+
+import mozfile
+import mozinfo
+import mozlog
+
+__all__ = [
+ 'check_for_crashes',
+ 'check_for_java_exception',
+ 'kill_and_get_minidump',
+ 'log_crashes',
+ 'cleanup_pending_crash_reports',
+]
+
+
+StackInfo = namedtuple("StackInfo",
+ ["minidump_path",
+ "signature",
+ "stackwalk_stdout",
+ "stackwalk_stderr",
+ "stackwalk_retcode",
+ "stackwalk_errors",
+ "extra"])
+
+
+def get_logger():
+ structured_logger = mozlog.get_default_logger("mozcrash")
+ if structured_logger is None:
+ return mozlog.unstructured.getLogger('mozcrash')
+ return structured_logger
+
+
+def check_for_crashes(dump_directory,
+ symbols_path=None,
+ stackwalk_binary=None,
+ dump_save_path=None,
+ test_name=None,
+ quiet=False):
+ """
+ Print a stack trace for minidump files left behind by a crashing program.
+
+ `dump_directory` will be searched for minidump files. Any minidump files found will
+ have `stackwalk_binary` executed on them, with `symbols_path` passed as an extra
+ argument.
+
+ `stackwalk_binary` should be a path to the minidump_stackwalk binary.
+ If `stackwalk_binary` is not set, the MINIDUMP_STACKWALK environment variable
+ will be checked and its value used if it is not empty.
+
+ `symbols_path` should be a path to a directory containing symbols to use for
+ dump processing. This can either be a path to a directory containing Breakpad-format
+ symbols, or a URL to a zip file containing a set of symbols.
+
+ If `dump_save_path` is set, it should be a path to a directory in which to copy minidump
+ files for safekeeping after a stack trace has been printed. If not set, the environment
+ variable MINIDUMP_SAVE_PATH will be checked and its value used if it is not empty.
+
+ If `test_name` is set it will be used as the test name in log output. If not set the
+ filename of the calling function will be used.
+
+ If `quiet` is set, no PROCESS-CRASH message will be printed to stdout if a
+ crash is detected.
+
+    Returns the number of minidump files found, or False if there were none.
+ """
+
+ # try to get the caller's filename if no test name is given
+ if test_name is None:
+ try:
+ test_name = os.path.basename(sys._getframe(1).f_code.co_filename)
+ except:
+ test_name = "unknown"
+
+ crash_info = CrashInfo(dump_directory, symbols_path, dump_save_path=dump_save_path,
+ stackwalk_binary=stackwalk_binary)
+
+ if not crash_info.has_dumps:
+ return False
+
+ crash_count = 0
+ for info in crash_info:
+ crash_count += 1
+ if not quiet:
+ stackwalk_output = ["Crash dump filename: %s" % info.minidump_path]
+ if info.stackwalk_stderr:
+ stackwalk_output.append("stderr from minidump_stackwalk:")
+ stackwalk_output.append(info.stackwalk_stderr)
+ elif info.stackwalk_stdout is not None:
+ stackwalk_output.append(info.stackwalk_stdout)
+ if info.stackwalk_retcode is not None and info.stackwalk_retcode != 0:
+ stackwalk_output.append("minidump_stackwalk exited with return code %d" %
+ info.stackwalk_retcode)
+ signature = info.signature if info.signature else "unknown top frame"
+ print "PROCESS-CRASH | %s | application crashed [%s]" % (test_name,
+ signature)
+ print '\n'.join(stackwalk_output)
+ print '\n'.join(info.stackwalk_errors)
+
+ return crash_count
+
+
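+# A minimal usage sketch for check_for_crashes (the paths and test name below
+# are placeholders):
+#
+#   import mozcrash
+#   crash_count = mozcrash.check_for_crashes('/tmp/dumps',
+#                                            symbols_path='/tmp/symbols',
+#                                            test_name='test_example.py')
+#   if crash_count:
+#       print "detected %d crash(es)" % crash_count
+
+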
+def log_crashes(logger,
+ dump_directory,
+ symbols_path,
+ process=None,
+ test=None,
+ stackwalk_binary=None,
+ dump_save_path=None):
+ """Log crashes using a structured logger"""
+ crash_count = 0
+ for info in CrashInfo(dump_directory, symbols_path, dump_save_path=dump_save_path,
+ stackwalk_binary=stackwalk_binary):
+ crash_count += 1
+ kwargs = info._asdict()
+ kwargs.pop("extra")
+ logger.crash(process=process, test=test, **kwargs)
+ return crash_count
+
+
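+# A sketch of structured crash logging; the logger is assumed to come from
+# mozlog.get_default_logger() (as in get_logger() above) and the paths are
+# placeholders:
+#
+#   logger = mozlog.get_default_logger("mozcrash")
+#   if logger:
+#       log_crashes(logger, '/tmp/dumps', '/tmp/symbols',
+#                   test='test_example.py')
+
+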
+class CrashInfo(object):
+ """Get information about a crash based on dump files.
+
+ Typical usage is to iterate over the CrashInfo object. This returns StackInfo
+ objects, one for each crash dump file that is found in the dump_directory.
+
+ :param dump_directory: Path to search for minidump files
+    :param symbols_path: Path to a directory containing symbols to use for
+ dump processing. This can either be a path to a directory
+ containing Breakpad-format symbols, or a URL to a zip file
+ containing a set of symbols.
+ :param dump_save_path: Path to which to save the dump files. If this is None,
+ the MINIDUMP_SAVE_PATH environment variable will be used.
+ :param stackwalk_binary: Path to the minidump_stackwalk binary. If this is None,
+ the MINIDUMP_STACKWALK environment variable will be used
+ as the path to the minidump binary."""
+
+ def __init__(self, dump_directory, symbols_path, dump_save_path=None,
+ stackwalk_binary=None):
+ self.dump_directory = dump_directory
+ self.symbols_path = symbols_path
+ self.remove_symbols = False
+
+ if dump_save_path is None:
+ dump_save_path = os.environ.get('MINIDUMP_SAVE_PATH', None)
+ self.dump_save_path = dump_save_path
+
+ if stackwalk_binary is None:
+ stackwalk_binary = os.environ.get('MINIDUMP_STACKWALK', None)
+ self.stackwalk_binary = stackwalk_binary
+
+ self.logger = get_logger()
+ self._dump_files = None
+
+ def _get_symbols(self):
+ # If no symbols path has been set create a temporary folder to let the
+ # minidump stackwalk download the symbols.
+ if not self.symbols_path:
+ self.symbols_path = tempfile.mkdtemp()
+ self.remove_symbols = True
+
+ # This updates self.symbols_path so we only download once.
+ if mozfile.is_url(self.symbols_path):
+ self.remove_symbols = True
+ self.logger.info("Downloading symbols from: %s" % self.symbols_path)
+ # Get the symbols and write them to a temporary zipfile
+ data = urllib2.urlopen(self.symbols_path)
+ with tempfile.TemporaryFile() as symbols_file:
+ symbols_file.write(data.read())
+ # extract symbols to a temporary directory (which we'll delete after
+ # processing all crashes)
+ self.symbols_path = tempfile.mkdtemp()
+ with zipfile.ZipFile(symbols_file, 'r') as zfile:
+ mozfile.extract_zip(zfile, self.symbols_path)
+
+ @property
+ def dump_files(self):
+ """List of tuple (path_to_dump_file, path_to_extra_file) for each dump
+ file in self.dump_directory. The extra files may not exist."""
+ if self._dump_files is None:
+ self._dump_files = [(path, os.path.splitext(path)[0] + '.extra') for path in
+ glob.glob(os.path.join(self.dump_directory, '*.dmp'))]
+ max_dumps = 10
+ if len(self._dump_files) > max_dumps:
+ self.logger.warning("Found %d dump files -- limited to %d!" %
+ (len(self._dump_files), max_dumps))
+ del self._dump_files[max_dumps:]
+
+ return self._dump_files
+
+ @property
+ def has_dumps(self):
+ """Boolean indicating whether any crash dump files were found in the
+ current directory"""
+ return len(self.dump_files) > 0
+
+ def __iter__(self):
+ for path, extra in self.dump_files:
+ rv = self._process_dump_file(path, extra)
+ yield rv
+
+ if self.remove_symbols:
+ mozfile.remove(self.symbols_path)
+
+ def _process_dump_file(self, path, extra):
+ """Process a single dump file using self.stackwalk_binary, and return a
+ tuple containing properties of the crash dump.
+
+ :param path: Path to the minidump file to analyse
+ :return: A StackInfo tuple with the fields::
+ minidump_path: Path of the dump file
+ signature: The top frame of the stack trace, or None if it
+ could not be determined.
+ stackwalk_stdout: String of stdout data from stackwalk
+ stackwalk_stderr: String of stderr data from stackwalk or
+ None if it succeeded
+ stackwalk_retcode: Return code from stackwalk
+ stackwalk_errors: List of errors in human-readable form that prevented
+ stackwalk being launched.
+ """
+ self._get_symbols()
+
+ errors = []
+ signature = None
+ include_stderr = False
+ out = None
+ err = None
+ retcode = None
+ if (self.symbols_path and self.stackwalk_binary and
+ os.path.exists(self.stackwalk_binary) and
+ os.access(self.stackwalk_binary, os.X_OK)):
+
+ command = [
+ self.stackwalk_binary,
+ path,
+ self.symbols_path
+ ]
+ self.logger.info('Copy/paste: ' + ' '.join(command))
+ # run minidump_stackwalk
+ p = subprocess.Popen(
+ command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE
+ )
+ (out, err) = p.communicate()
+ retcode = p.returncode
+
+ if len(out) > 3:
+ # minidump_stackwalk is chatty,
+ # so ignore stderr when it succeeds.
+ # The top frame of the crash is always the line after "Thread N (crashed)"
+ # Examples:
+ # 0 libc.so + 0xa888
+ # 0 libnss3.so!nssCertificate_Destroy [certificate.c : 102 + 0x0]
+ # 0 mozjs.dll!js::GlobalObject::getDebuggers() [GlobalObject.cpp:89df18f9b6da : 580 + 0x0] # noqa
+ # 0 libxul.so!void js::gc::MarkInternal<JSObject>(JSTracer*, JSObject**)
+ # [Marking.cpp : 92 + 0x28]
+ lines = out.splitlines()
+ for i, line in enumerate(lines):
+ if "(crashed)" in line:
+ match = re.search(r"^ 0 (?:.*!)?(?:void )?([^\[]+)", lines[i + 1])
+ if match:
+ signature = "@ %s" % match.group(1).strip()
+ break
+ else:
+ include_stderr = True
+
+ else:
+ if not self.symbols_path:
+ errors.append("No symbols path given, can't process dump.")
+ if not self.stackwalk_binary:
+ errors.append("MINIDUMP_STACKWALK not set, can't process dump.")
+ elif self.stackwalk_binary and not os.path.exists(self.stackwalk_binary):
+ errors.append("MINIDUMP_STACKWALK binary not found: %s" % self.stackwalk_binary)
+ elif not os.access(self.stackwalk_binary, os.X_OK):
+ errors.append('This user cannot execute the MINIDUMP_STACKWALK binary.')
+
+ if self.dump_save_path:
+ self._save_dump_file(path, extra)
+
+ if os.path.exists(path):
+ mozfile.remove(path)
+ if os.path.exists(extra):
+ mozfile.remove(extra)
+
+ return StackInfo(path,
+ signature,
+ out,
+ err if include_stderr else None,
+ retcode,
+ errors,
+ extra)
+
+ def _save_dump_file(self, path, extra):
+ if os.path.isfile(self.dump_save_path):
+ os.unlink(self.dump_save_path)
+ if not os.path.isdir(self.dump_save_path):
+ try:
+ os.makedirs(self.dump_save_path)
+ except OSError:
+ pass
+
+ shutil.move(path, self.dump_save_path)
+ self.logger.info("Saved minidump as %s" %
+ os.path.join(self.dump_save_path, os.path.basename(path)))
+
+ if os.path.isfile(extra):
+ shutil.move(extra, self.dump_save_path)
+ self.logger.info("Saved app info as %s" %
+ os.path.join(self.dump_save_path, os.path.basename(extra)))
+
+
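+# The iteration pattern described in the CrashInfo docstring, as a short
+# sketch (the directory and binary paths are placeholders):
+#
+#   crash_info = CrashInfo('/tmp/dumps', '/tmp/symbols',
+#                          stackwalk_binary='/usr/bin/minidump_stackwalk')
+#   if crash_info.has_dumps:
+#       for stack in crash_info:
+#           print stack.signature, stack.stackwalk_retcode
+
+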
+def check_for_java_exception(logcat, test_name=None, quiet=False):
+ """
+ Print a summary of a fatal Java exception, if present in the provided
+ logcat output.
+
+ Example:
+ PROCESS-CRASH | <test-name> | java-exception java.lang.NullPointerException at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833) # noqa
+
+ `logcat` should be a list of strings.
+
+ If `test_name` is set it will be used as the test name in log output. If not set the
+ filename of the calling function will be used.
+
+ If `quiet` is set, no PROCESS-CRASH message will be printed to stdout if a
+ crash is detected.
+
+ Returns True if a fatal Java exception was found, False otherwise.
+ """
+
+ # try to get the caller's filename if no test name is given
+ if test_name is None:
+ try:
+ test_name = os.path.basename(sys._getframe(1).f_code.co_filename)
+ except:
+ test_name = "unknown"
+
+ found_exception = False
+
+ for i, line in enumerate(logcat):
+ # Logs will be of form:
+ #
+ # 01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> REPORTING UNCAUGHT EXCEPTION FROM THREAD 9 ("GeckoBackgroundThread") # noqa
+ # 01-30 20:15:41.937 E/GeckoAppShell( 1703): java.lang.NullPointerException
+ # 01-30 20:15:41.937 E/GeckoAppShell( 1703): at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833) # noqa
+ # 01-30 20:15:41.937 E/GeckoAppShell( 1703): at android.os.Handler.handleCallback(Handler.java:587) # noqa
+ if "REPORTING UNCAUGHT EXCEPTION" in line:
+ # Strip away the date, time, logcat tag and pid from the next two lines and
+ # concatenate the remainder to form a concise summary of the exception.
+ found_exception = True
+ if len(logcat) >= i + 3:
+ logre = re.compile(r".*\): \t?(.*)")
+ m = logre.search(logcat[i + 1])
+ if m and m.group(1):
+ exception_type = m.group(1)
+ m = logre.search(logcat[i + 2])
+ if m and m.group(1):
+ exception_location = m.group(1)
+ if not quiet:
+ print "PROCESS-CRASH | %s | java-exception %s %s" % (test_name,
+ exception_type,
+ exception_location)
+ else:
+ print "Automation Error: java exception in logcat at line " \
+ "%d of %d: %s" % (i, len(logcat), line)
+ break
+
+ return found_exception
+
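+# Usage sketch for check_for_java_exception: feed it logcat output as a list
+# of lines (the adb invocation is illustrative and assumes adb is on PATH):
+#
+#   logcat = subprocess.check_output(['adb', 'logcat', '-d']).splitlines()
+#   if check_for_java_exception(logcat, test_name='test_example.py'):
+#       pass  # handle the reported exception
+
+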
+if mozinfo.isWin:
+ import ctypes
+ import uuid
+
+ kernel32 = ctypes.windll.kernel32
+ OpenProcess = kernel32.OpenProcess
+ CloseHandle = kernel32.CloseHandle
+
+ def write_minidump(pid, dump_directory, utility_path):
+ """
+ Write a minidump for a process.
+
+ :param pid: PID of the process to write a minidump for.
+ :param dump_directory: Directory in which to write the minidump.
+ """
+ PROCESS_QUERY_INFORMATION = 0x0400
+ PROCESS_VM_READ = 0x0010
+ GENERIC_READ = 0x80000000
+ GENERIC_WRITE = 0x40000000
+ CREATE_ALWAYS = 2
+ FILE_ATTRIBUTE_NORMAL = 0x80
+ INVALID_HANDLE_VALUE = -1
+
+ file_name = os.path.join(dump_directory,
+ str(uuid.uuid4()) + ".dmp")
+
+ if (mozinfo.info['bits'] != ctypes.sizeof(ctypes.c_voidp) * 8 and
+ utility_path):
+ # We're not going to be able to write a minidump with ctypes if our
+ # python process was compiled for a different architecture than
+ # firefox, so we invoke the minidumpwriter utility program.
+
+ log = get_logger()
+ minidumpwriter = os.path.normpath(os.path.join(utility_path,
+ "minidumpwriter.exe"))
+ log.info("Using %s to write a dump to %s for [%d]" %
+ (minidumpwriter, file_name, pid))
+ if not os.path.exists(minidumpwriter):
+ log.error("minidumpwriter not found in %s" % utility_path)
+ return
+
+ if isinstance(file_name, unicode):
+ # Convert to a byte string before sending to the shell.
+ file_name = file_name.encode(sys.getfilesystemencoding())
+
+ status = subprocess.Popen([minidumpwriter, str(pid), file_name]).wait()
+ if status:
+ log.error("minidumpwriter exited with status: %d" % status)
+ return
+
+ proc_handle = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
+ 0, pid)
+ if not proc_handle:
+ return
+
+ if not isinstance(file_name, unicode):
+ # Convert to unicode explicitly so our path will be valid as input
+ # to CreateFileW
+ file_name = unicode(file_name, sys.getfilesystemencoding())
+
+ file_handle = kernel32.CreateFileW(file_name,
+ GENERIC_READ | GENERIC_WRITE,
+ 0,
+ None,
+ CREATE_ALWAYS,
+ FILE_ATTRIBUTE_NORMAL,
+ None)
+ if file_handle != INVALID_HANDLE_VALUE:
+ ctypes.windll.dbghelp.MiniDumpWriteDump(proc_handle,
+ pid,
+ file_handle,
+ # Dump type - MiniDumpNormal
+ 0,
+ # Exception parameter
+ None,
+ # User stream parameter
+ None,
+ # Callback parameter
+ None)
+ CloseHandle(file_handle)
+ CloseHandle(proc_handle)
+
+ def kill_pid(pid):
+ """
+ Terminate a process with extreme prejudice.
+
+ :param pid: PID of the process to terminate.
+ """
+ PROCESS_TERMINATE = 0x0001
+ handle = OpenProcess(PROCESS_TERMINATE, 0, pid)
+ if handle:
+ kernel32.TerminateProcess(handle, 1)
+ CloseHandle(handle)
+else:
+ def kill_pid(pid):
+ """
+ Terminate a process with extreme prejudice.
+
+ :param pid: PID of the process to terminate.
+ """
+ os.kill(pid, signal.SIGKILL)
+
+
+def kill_and_get_minidump(pid, dump_directory, utility_path=None):
+ """
+ Attempt to kill a process and leave behind a minidump describing its
+ execution state.
+
+ :param pid: The PID of the process to kill.
+ :param dump_directory: The directory where a minidump should be written on
+ Windows, where the dump will be written from outside the process.
+
+ On Windows a dump will be written using the MiniDumpWriteDump function
+ from DbgHelp.dll. On Linux and OS X the process will be sent a SIGABRT
+ signal to trigger minidump writing via a Breakpad signal handler. On other
+ platforms the process will simply be killed via SIGKILL.
+
+ If the process is hung in such a way that it cannot respond to SIGABRT
+ it may still be running after this function returns. In that case it
+ is the caller's responsibility to deal with killing it.
+ """
+ needs_killing = True
+ if mozinfo.isWin:
+ write_minidump(pid, dump_directory, utility_path)
+ elif mozinfo.isLinux or mozinfo.isMac:
+ os.kill(pid, signal.SIGABRT)
+ needs_killing = False
+ if needs_killing:
+ kill_pid(pid)
+
+
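+# Sketch of the kill-then-report flow (the pid and paths are placeholders):
+#
+#   kill_and_get_minidump(pid, '/tmp/dumps')
+#   check_for_crashes('/tmp/dumps', symbols_path='/tmp/symbols')
+
+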
+def cleanup_pending_crash_reports():
+ """
+ Delete any pending crash reports.
+
+ The presence of pending crash reports may be reported by the browser,
+ affecting test results; it is best to ensure that these are removed
+ before starting any browser tests.
+
+ Firefox stores pending crash reports in "<UAppData>/Crash Reports".
+ If the browser is not running, it cannot provide <UAppData>, so this
+ code tries to anticipate its value.
+
+ See dom/system/OSFileConstants.cpp for platform variations of <UAppData>.
+ """
+ if mozinfo.isWin:
+ location = os.path.expanduser("~\\AppData\\Roaming\\Mozilla\\Firefox\\Crash Reports")
+ elif mozinfo.isMac:
+ location = os.path.expanduser("~/Library/Application Support/firefox/Crash Reports")
+ else:
+ location = os.path.expanduser("~/.mozilla/firefox/Crash Reports")
+ logger = get_logger()
+ if os.path.exists(location):
+ try:
+ mozfile.remove(location)
+ logger.info("Removed pending crash reports at '%s'" % location)
+ except:
+ pass
+
+
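+# Typical call site (a sketch): run the cleanup before launching the browser
+# under test so that stale pending reports cannot skew results.
+#
+#   cleanup_pending_crash_reports()
+#   start_browser()  # hypothetical test-harness helper
+
+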
+if __name__ == '__main__':
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--stackwalk-binary', '-b')
+ parser.add_argument('--dump-save-path', '-o')
+ parser.add_argument('--test-name', '-n')
+ parser.add_argument('dump_directory')
+ parser.add_argument('symbols_path')
+ args = parser.parse_args()
+
+ check_for_crashes(args.dump_directory, args.symbols_path,
+ stackwalk_binary=args.stackwalk_binary,
+ dump_save_path=args.dump_save_path,
+ test_name=args.test_name)
diff --git a/testing/mozbase/mozcrash/setup.py b/testing/mozbase/mozcrash/setup.py
new file mode 100644
index 000000000..da5ffa19b
--- /dev/null
+++ b/testing/mozbase/mozcrash/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_NAME = 'mozcrash'
+PACKAGE_VERSION = '1.0'
+
+# dependencies
+deps = ['mozfile >= 1.0',
+ 'mozlog >= 3.0']
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Library for printing stack traces from minidumps "
+ "left behind by crashed processes",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozcrash'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ )
diff --git a/testing/mozbase/mozcrash/tests/manifest.ini b/testing/mozbase/mozcrash/tests/manifest.ini
new file mode 100644
index 000000000..528fdea7b
--- /dev/null
+++ b/testing/mozbase/mozcrash/tests/manifest.ini
@@ -0,0 +1 @@
+[test.py]
diff --git a/testing/mozbase/mozcrash/tests/test.py b/testing/mozbase/mozcrash/tests/test.py
new file mode 100644
index 000000000..8f6b14f50
--- /dev/null
+++ b/testing/mozbase/mozcrash/tests/test.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+import subprocess
+import tempfile
+import shutil
+import urlparse
+import zipfile
+import StringIO
+import mozcrash
+import mozhttpd
+import mozlog.unstructured as mozlog
+
+# Make logs go away
+log = mozlog.getLogger("mozcrash", handler=mozlog.FileHandler(os.devnull))
+
+
+def popen_factory(stdouts):
+ """
+    Generate a class that can mock subprocess.Popen. |stdouts| is an iterator
+    that should yield an iterable for the stdout of each mocked process in turn.
+ """
+ class mock_popen(object):
+
+ def __init__(self, args, *args_rest, **kwargs):
+ self.stdout = stdouts.next()
+ self.returncode = 0
+
+ def wait(self):
+ return 0
+
+ def communicate(self):
+ return (self.stdout.next(), "")
+
+ return mock_popen
+
+
+class TestCrash(unittest.TestCase):
+
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+ # a fake file to use as a stackwalk binary
+ self.stackwalk = os.path.join(self.tempdir, "stackwalk")
+ open(self.stackwalk, "w").write("fake binary")
+ self._subprocess_popen = subprocess.Popen
+ subprocess.Popen = popen_factory(self.next_mock_stdout())
+ self.stdouts = []
+
+ def tearDown(self):
+ subprocess.Popen = self._subprocess_popen
+ shutil.rmtree(self.tempdir)
+
+ def next_mock_stdout(self):
+ if not self.stdouts:
+ yield iter([])
+ for s in self.stdouts:
+ yield iter(s)
+
+ def test_nodumps(self):
+ """
+ Test that check_for_crashes returns False if no dumps are present.
+ """
+ self.stdouts.append(["this is some output"])
+ self.assertFalse(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ stackwalk_binary=self.stackwalk,
+ quiet=True))
+
+ def test_simple(self):
+ """
+ Test that check_for_crashes returns True if a dump is present.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ self.stdouts.append(["this is some output"])
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ stackwalk_binary=self.stackwalk,
+ quiet=True))
+
+ def test_stackwalk_envvar(self):
+ """
+ Test that check_for_crashes uses the MINIDUMP_STACKWALK environment var.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ self.stdouts.append(["this is some output"])
+ os.environ['MINIDUMP_STACKWALK'] = self.stackwalk
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ quiet=True))
+ del os.environ['MINIDUMP_STACKWALK']
+
+ def test_save_path(self):
+ """
+ Test that dump_save_path works.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ open(os.path.join(self.tempdir, "test.extra"), "w").write("bar")
+ save_path = os.path.join(self.tempdir, "saved")
+ os.mkdir(save_path)
+ self.stdouts.append(["this is some output"])
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ stackwalk_binary=self.stackwalk,
+ dump_save_path=save_path,
+ quiet=True))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.dmp")))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.extra")))
+
+ def test_save_path_not_present(self):
+ """
+ Test that dump_save_path works when the directory doesn't exist.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ open(os.path.join(self.tempdir, "test.extra"), "w").write("bar")
+ save_path = os.path.join(self.tempdir, "saved")
+ self.stdouts.append(["this is some output"])
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ stackwalk_binary=self.stackwalk,
+ dump_save_path=save_path,
+ quiet=True))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.dmp")))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.extra")))
+
+ def test_save_path_isfile(self):
+ """
+ Test that dump_save_path works when the directory doesn't exist,
+ but a file with the same name exists.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ open(os.path.join(self.tempdir, "test.extra"), "w").write("bar")
+ save_path = os.path.join(self.tempdir, "saved")
+ open(save_path, "w").write("junk")
+ self.stdouts.append(["this is some output"])
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ stackwalk_binary=self.stackwalk,
+ dump_save_path=save_path,
+ quiet=True))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.dmp")))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.extra")))
+
+ def test_save_path_envvar(self):
+ """
+        Test that the MINIDUMP_SAVE_PATH environment variable works.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ open(os.path.join(self.tempdir, "test.extra"), "w").write("bar")
+ save_path = os.path.join(self.tempdir, "saved")
+ os.mkdir(save_path)
+ self.stdouts.append(["this is some output"])
+ os.environ['MINIDUMP_SAVE_PATH'] = save_path
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path='symbols_path',
+ stackwalk_binary=self.stackwalk,
+ quiet=True))
+ del os.environ['MINIDUMP_SAVE_PATH']
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.dmp")))
+ self.assert_(os.path.isfile(os.path.join(save_path, "test.extra")))
+
+ def test_symbol_path_not_present(self):
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ self.stdouts.append(["this is some output"])
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbols_path=None,
+ stackwalk_binary=self.stackwalk,
+ quiet=True))
+
+ def test_symbol_path_url(self):
+ """
+ Test that passing a URL as symbols_path correctly fetches the URL.
+ """
+ open(os.path.join(self.tempdir, "test.dmp"), "w").write("foo")
+ self.stdouts.append(["this is some output"])
+
+ def make_zipfile():
+ data = StringIO.StringIO()
+ z = zipfile.ZipFile(data, 'w')
+ z.writestr("symbols.txt", "abc/xyz")
+ z.close()
+ return data.getvalue()
+
+ def get_symbols(req):
+ headers = {}
+ return (200, headers, make_zipfile())
+ httpd = mozhttpd.MozHttpd(port=0,
+ urlhandlers=[{'method': 'GET',
+ 'path': '/symbols',
+ 'function': get_symbols}])
+ httpd.start()
+ symbol_url = urlparse.urlunsplit(('http', '%s:%d' % httpd.httpd.server_address,
+ '/symbols', '', ''))
+ self.assert_(mozcrash.check_for_crashes(self.tempdir,
+ symbol_url,
+ stackwalk_binary=self.stackwalk,
+ quiet=True))
+
+
+class TestJavaException(unittest.TestCase):
+
+ def setUp(self):
+ self.test_log = [
+ "01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> "
+ "REPORTING UNCAUGHT EXCEPTION FROM THREAD 9 (\"GeckoBackgroundThread\")",
+ "01-30 20:15:41.937 E/GeckoAppShell( 1703): java.lang.NullPointerException",
+ "01-30 20:15:41.937 E/GeckoAppShell( 1703):"
+ " at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833)",
+ "01-30 20:15:41.937 E/GeckoAppShell( 1703):"
+ " at android.os.Handler.handleCallback(Handler.java:587)"]
+
+ def test_uncaught_exception(self):
+ """
+ Test for an exception which should be caught
+ """
+ self.assert_(mozcrash.check_for_java_exception(self.test_log, quiet=True))
+
+ def test_truncated_exception(self):
+ """
+ Test that an exception which should be caught is still
+ detected when the log output is truncated
+ """
+ truncated_log = list(self.test_log)
+ truncated_log[0], truncated_log[1] = truncated_log[1], truncated_log[0]
+ self.assert_(mozcrash.check_for_java_exception(truncated_log, quiet=True))
+
+ def test_unchecked_exception(self):
+ """
+ Test for an exception which should not be caught
+ """
+ passable_log = list(self.test_log)
+ passable_log[0] = "01-30 20:15:41.937 E/GeckoAppShell( 1703):" \
+ " >>> NOT-SO-BAD EXCEPTION FROM THREAD 9 (\"GeckoBackgroundThread\")"
+ self.assert_(not mozcrash.check_for_java_exception(passable_log, quiet=True))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdebug/mozdebug/__init__.py b/testing/mozbase/mozdebug/mozdebug/__init__.py
new file mode 100644
index 000000000..3450d755c
--- /dev/null
+++ b/testing/mozbase/mozdebug/mozdebug/__init__.py
@@ -0,0 +1,31 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+This module contains a set of functions to gather information about the
+debugging capabilities of the platform. It allows looking for a specific
+debugger or querying the system for a compatible/default debugger.
+
+The following simple example looks for the default debugger on the
+current platform and launches a debugger process with the correct
+debugger-specific arguments:
+
+::
+
+ import mozdebug
+
+ debugger = mozdebug.get_default_debugger_name()
+ debuggerInfo = mozdebug.get_debugger_info(debugger)
+
+ debuggeePath = "toDebug"
+
+ processArgs = [debuggerInfo.path] + debuggerInfo.args
+ processArgs.append(debuggeePath)
+
+ run_process(processArgs, ...)
+
+"""
+
+from mozdebug import *
diff --git a/testing/mozbase/mozdebug/mozdebug/mozdebug.py b/testing/mozbase/mozdebug/mozdebug/mozdebug.py
new file mode 100755
index 000000000..5777a0001
--- /dev/null
+++ b/testing/mozbase/mozdebug/mozdebug/mozdebug.py
@@ -0,0 +1,291 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import mozinfo
+from collections import namedtuple
+from distutils.spawn import find_executable
+from subprocess import check_output
+
+__all__ = ['get_debugger_info',
+ 'get_default_debugger_name',
+ 'DebuggerSearch',
+ 'get_default_valgrind_args']
+
+'''
+Map of debugging programs to information about them, like default arguments
+and whether or not they are interactive.
+
+To add support for a new debugger, simply add the relevant entry in
+_DEBUGGER_INFO and optionally update the _DEBUGGER_PRIORITIES (a commented
+example follows the priorities map below).
+'''
+_DEBUGGER_INFO = {
+ # gdb requires that you supply the '--args' flag in order to pass arguments
+ # after the executable name to the executable.
+ 'gdb': {
+ 'interactive': True,
+ 'args': ['-q', '--args']
+ },
+
+ 'cgdb': {
+ 'interactive': True,
+ 'args': ['-q', '--args']
+ },
+
+ 'lldb': {
+ 'interactive': True,
+ 'args': ['--'],
+ 'requiresEscapedArgs': True
+ },
+
+ # Visual Studio Debugger Support.
+ 'devenv.exe': {
+ 'interactive': True,
+ 'args': ['-debugexe']
+ },
+
+ # Visual C++ Express Debugger Support.
+ 'wdexpress.exe': {
+ 'interactive': True,
+ 'args': ['-debugexe']
+ },
+
+ # Windows Development Kit super-debugger.
+ 'windbg.exe': {
+ 'interactive': True,
+ },
+}
+
+# Maps each OS platform to the preferred debugger programs found in _DEBUGGER_INFO.
+_DEBUGGER_PRIORITIES = {
+ 'win': ['devenv.exe', 'wdexpress.exe'],
+ 'linux': ['gdb', 'cgdb', 'lldb'],
+ 'mac': ['lldb', 'gdb'],
+ 'android': ['gdb'],
+ 'unknown': ['gdb']
+}
+
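+# A commented sketch of how a new entry might be added. The debugger name 'rr'
+# and its arguments below are illustrative assumptions, not something this
+# module ships:
+#
+#   _DEBUGGER_INFO['rr'] = {'interactive': True, 'args': ['record']}
+#   _DEBUGGER_PRIORITIES['linux'].insert(0, 'rr')
+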
+
+def _windbg_installation_paths():
+ programFilesSuffixes = ['', ' (x86)']
+ programFiles = "C:/Program Files"
+ # Try the most recent versions first.
+ windowsKitsVersions = ['10', '8.1', '8']
+
+ for suffix in programFilesSuffixes:
+ windowsKitsPrefix = os.path.join(programFiles + suffix,
+ 'Windows Kits')
+ for version in windowsKitsVersions:
+ yield os.path.join(windowsKitsPrefix, version,
+ 'Debuggers', 'x86', 'windbg.exe')
+
+
+def get_debugger_path(debugger):
+ '''
+ Get the full path of the debugger.
+
+ :param debugger: The name of the debugger.
+ '''
+
+ if mozinfo.os == 'mac' and debugger == 'lldb':
+ # On newer OSX versions, System Integrity Protection prevents us from
+ # setting certain env vars for a process such as DYLD_LIBRARY_PATH if
+ # it's in a protected directory such as /usr/bin. This is the case for
+ # lldb, so we try to find an instance under the Xcode install instead.
+
+ # Attempt to use the xcrun util to find the path.
+ try:
+ path = check_output(['xcrun', '--find', 'lldb']).strip()
+ if path:
+ return path
+ except:
+ # Just default to find_executable instead.
+ pass
+
+ return find_executable(debugger)
+
+
+def get_debugger_info(debugger, debuggerArgs=None, debuggerInteractive=False):
+ '''
+ Get the information about the requested debugger.
+
+ Returns a namedtuple containing the |path| of the debugger executable,
+ whether it will run in |interactive| mode, its arguments and whether it
+ needs to escape arguments it passes to the debugged program
+ (|requiresEscapedArgs|). If the debugger cannot be found on the system,
+ returns |None|. A commented usage sketch follows get_default_debugger_name
+ below.
+
+ :param debugger: The name of the debugger.
+ :param debuggerArgs: If specified, it's the arguments to pass to the debugger,
+ as a string. Any debugger-specific separator arguments are appended after these
+ arguments.
+ :param debuggerInteractive: If specified, forces the debugger to be interactive.
+ '''
+
+ debuggerPath = None
+
+ if debugger:
+ # Append '.exe' to the debugger on Windows if it's not present,
+ # so things like '--debugger=devenv' work.
+ if (os.name == 'nt'
+ and not debugger.lower().endswith('.exe')):
+ debugger += '.exe'
+
+ debuggerPath = get_debugger_path(debugger)
+
+ if not debuggerPath:
+ # windbg is not installed with the standard set of tools, and it's
+ # entirely possible that the user hasn't added the install location to
+ # PATH, so we have to be a little more clever than normal to locate it.
+ # Just try to look for it in the standard installed location(s).
+ if debugger == 'windbg.exe':
+ for candidate in _windbg_installation_paths():
+ if os.path.exists(candidate):
+ debuggerPath = candidate
+ break
+ else:
+ if os.path.exists(debugger):
+ debuggerPath = debugger
+
+ if not debuggerPath:
+ print 'Error: Could not find debugger %s.' % debugger
+ return None
+
+ debuggerName = os.path.basename(debuggerPath).lower()
+
+ def get_debugger_info(type, default):
+ if debuggerName in _DEBUGGER_INFO and type in _DEBUGGER_INFO[debuggerName]:
+ return _DEBUGGER_INFO[debuggerName][type]
+ return default
+
+ # Define a namedtuple to access the debugger information from the outside world.
+ DebuggerInfo = namedtuple(
+ 'DebuggerInfo',
+ ['path', 'interactive', 'args', 'requiresEscapedArgs']
+ )
+
+ debugger_arguments = []
+
+ if debuggerArgs:
+ # Append the provided debugger arguments at the end of the arguments list.
+ debugger_arguments += debuggerArgs.split()
+
+ debugger_arguments += get_debugger_info('args', [])
+
+ # Override the default debugger interactive mode if needed.
+ debugger_interactive = get_debugger_info('interactive', False)
+ if debuggerInteractive:
+ debugger_interactive = debuggerInteractive
+
+ d = DebuggerInfo(
+ debuggerPath,
+ debugger_interactive,
+ debugger_arguments,
+ get_debugger_info('requiresEscapedArgs', False)
+ )
+
+ return d
+
+# Defines the search policies to use in get_default_debugger_name.
+
+
+class DebuggerSearch:
+ OnlyFirst = 1
+ KeepLooking = 2
+
+
+def get_default_debugger_name(search=DebuggerSearch.OnlyFirst):
+ '''
+ Get the debugger name for the default debugger on current platform.
+
+ :param search: If specified, stops looking for the debugger if the
+ default one is not found (|DebuggerSearch.OnlyFirst|) or keeps
+ looking for other compatible debuggers (|DebuggerSearch.KeepLooking|).
+ '''
+
+ mozinfo.find_and_update_from_json()
+ os = mozinfo.info['os']
+
+ # Find out which debuggers are preferred for use on this platform.
+ debuggerPriorities = _DEBUGGER_PRIORITIES[os if os in _DEBUGGER_PRIORITIES else 'unknown']
+
+ # Finally get the debugger information.
+ for debuggerName in debuggerPriorities:
+ debuggerPath = find_executable(debuggerName)
+ if debuggerPath:
+ return debuggerName
+ elif not search == DebuggerSearch.KeepLooking:
+ return None
+
+ return None
+
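+# A minimal, hedged usage sketch of the two helpers above; 'firefox' stands in
+# for whatever binary the caller wants to run under a debugger and is only an
+# assumption:
+#
+#   name = get_default_debugger_name(DebuggerSearch.KeepLooking)
+#   info = get_debugger_info(name) if name else None
+#   if info:
+#       cmd = [info.path] + info.args + ['firefox']
+#       # cmd can then be handed to subprocess or mozprocess to launch the
+#       # debuggee under the chosen debugger.
+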
+# Defines default values for Valgrind flags.
+#
+# --smc-check=all-non-file is required to deal with code generation and
+# patching by the various JITs. Note that this is only necessary on
+# x86 and x86_64, but not on ARM. This flag is only necessary for
+# Valgrind versions prior to 3.11.
+#
+# --vex-iropt-register-updates=allregs-at-mem-access is required so that
+# Valgrind generates correct register values whenever there is a
+# segfault that is caught and handled. In particular OdinMonkey
+# requires this. More recent Valgrinds (3.11 and later) provide
+# --px-default=allregs-at-mem-access and
+# --px-file-backed=unwindregs-at-mem-access
+# which provide a significantly cheaper alternative, by restricting the
+# precise exception behaviour to JIT generated code only.
+#
+# --trace-children=yes is required to get Valgrind to follow into
+# content and other child processes. The resulting output can be
+# difficult to make sense of, and --child-silent-after-fork=yes
+# helps by causing Valgrind to be silent for the child in the period
+# after fork() but before its subsequent exec().
+#
+# --trace-children-skip lists processes that we are not interested
+# in tracing into.
+#
+# --leak-check=full requests full stack traces for all leaked blocks
+# detected at process exit.
+#
+# --show-possibly-lost=no requests blocks for which only an interior
+# pointer was found to be considered not leaked.
+#
+#
+# TODO: pass in the user supplied args for V (--valgrind-args=) and
+# use this to detect if a different tool has been selected. If so
+# adjust tool-specific args appropriately.
+#
+# TODO: pass in the path to the Valgrind to be used (--valgrind=), and
+# check what flags it accepts. Possible args that might be beneficial:
+#
+# --num-transtab-sectors=24 [reduces re-jitting overheads in long runs]
+# --px-default=allregs-at-mem-access
+# --px-file-backed=unwindregs-at-mem-access
+# [these reduce PX overheads as described above]
+#
+
+
+def get_default_valgrind_args():
+ return (['--fair-sched=yes',
+ '--smc-check=all-non-file',
+ '--vex-iropt-register-updates=allregs-at-mem-access',
+ '--trace-children=yes',
+ '--child-silent-after-fork=yes',
+ ('--trace-children-skip='
+ + '/usr/bin/hg,/bin/rm,*/bin/certutil,*/bin/pk12util,'
+ + '*/bin/ssltunnel,*/bin/uname,*/bin/which,*/bin/ps,'
+ + '*/bin/grep,*/bin/java'),
+ ]
+ + get_default_valgrind_tool_specific_args())
+
+# The default tool is Memcheck. Feeding these arguments to a different
+# Valgrind tool will cause it to fail at startup, so don't do that!
+
+
+def get_default_valgrind_tool_specific_args():
+ return ['--partial-loads-ok=yes',
+ '--leak-check=full',
+ '--show-possibly-lost=no',
+ ]
diff --git a/testing/mozbase/mozdebug/setup.py b/testing/mozbase/mozdebug/setup.py
new file mode 100644
index 000000000..1f4e5329b
--- /dev/null
+++ b/testing/mozbase/mozdebug/setup.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = '0.1'
+
+setup(name='mozdebug',
+ version=PACKAGE_VERSION,
+ description="Utilities for running applications under native code debuggers "
+ "intended for use in Mozilla testing",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozdebug'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=['mozinfo'],
+ entry_points="""
+ # -*- Entry points: -*-
+ """,
+ )
diff --git a/testing/mozbase/mozdevice/adb_tests/test_device_running_adb_as_root.py b/testing/mozbase/mozdevice/adb_tests/test_device_running_adb_as_root.py
new file mode 100644
index 000000000..2b223bb04
--- /dev/null
+++ b/testing/mozbase/mozdevice/adb_tests/test_device_running_adb_as_root.py
@@ -0,0 +1,48 @@
+"""
+ This test exercises devices on which adbd does not get started as root,
+ specifically devices that have ro.secure == 1 and ro.debuggable == 1.
+
+ Running this test case requires various reboots which makes it a
+ very slow test case to run.
+"""
+import unittest
+import sys
+
+from mozdevice import DeviceManagerADB
+
+
+class TestFileOperations(unittest.TestCase):
+
+ def setUp(self):
+ dm = DeviceManagerADB()
+ dm.reboot(wait=True)
+
+ def test_run_adb_as_root_parameter(self):
+ dm = DeviceManagerADB()
+ self.assertTrue(dm.processInfo("adbd")[2] != "root")
+ dm = DeviceManagerADB(runAdbAsRoot=True)
+ self.assertTrue(dm.processInfo("adbd")[2] == "root")
+
+ def test_after_reboot_adb_runs_as_root(self):
+ dm = DeviceManagerADB(runAdbAsRoot=True)
+ self.assertTrue(dm.processInfo("adbd")[2] == "root")
+ dm.reboot(wait=True)
+ self.assertTrue(dm.processInfo("adbd")[2] == "root")
+
+ def tearDown(self):
+ dm = DeviceManagerADB()
+ dm.reboot()
+
+if __name__ == "__main__":
+ dm = DeviceManagerADB()
+ if not dm.devices():
+ print "There are no connected adb devices"
+ sys.exit(1)
+ else:
+ if not (int(dm._runCmd(["shell", "getprop", "ro.secure"]).output[0]) and
+ int(dm._runCmd(["shell", "getprop", "ro.debuggable"]).output[0])):
+ print "This test case is meant for devices with devices that start " \
+ "adbd as non-root and allows for adbd to be restarted as root."
+ sys.exit(1)
+
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/adb_tests/test_devicemanagerADB.py b/testing/mozbase/mozdevice/adb_tests/test_devicemanagerADB.py
new file mode 100644
index 000000000..495e449a4
--- /dev/null
+++ b/testing/mozbase/mozdevice/adb_tests/test_devicemanagerADB.py
@@ -0,0 +1,219 @@
+"""
+ Info:
+ This tests DeviceManagerADB with a real device
+
+ Requirements:
+ - You must have a device connected
+ - It should be listed under 'adb devices'
+
+ Notes:
+ - Not all functions have been covered.
+ In particular, functions from the parent class
+ - No testing of properties is done
+ - The test cases are very simple and could be improved
+ with deeper inspection of the return values
+
+ Author(s):
+ - Armen Zambrano <armenzg@mozilla.com>
+
+ Functions that are not being tested:
+ - launchProcess - DEPRECATED
+ - getIP
+ - recordLogcat
+ - saveScreenshot
+ - validateDir
+ - mkDirs
+ - getDeviceRoot
+ - shellCheckOutput
+ - processExist
+
+ I assume these functions are only useful for Android
+ - getAppRoot()
+ - updateApp()
+ - uninstallApp()
+ - uninstallAppAndReboot()
+"""
+
+import os
+import re
+import socket
+import sys
+import tempfile
+import unittest
+from StringIO import StringIO
+
+from mozdevice import DeviceManagerADB, DMError
+
+
+def find_mount_permissions(dm, mount_path):
+ for mount_point in dm._runCmd(["shell", "mount"]).output:
+ if mount_point.find(mount_path) > 0:
+ return re.search('(ro|rw)(?=,)', mount_point).group(0)
+
+
+class DeviceManagerADBTestCase(unittest.TestCase):
+ tempLocalDir = "tempDir"
+ tempLocalFile = os.path.join(tempLocalDir, "tempfile.txt")
+ tempRemoteDir = None
+ tempRemoteFile = None
+ tempRemoteSystemFile = None
+
+ def setUp(self):
+ # /system is expected to be mounted read-only before the tests run.
+ self.assertEqual(find_mount_permissions(self.dm, "/system"), "ro")
+
+ self.assertTrue(os.path.exists(self.tempLocalDir))
+ self.assertTrue(os.path.exists(self.tempLocalFile))
+
+ if self.dm.fileExists(self.tempRemoteFile):
+ self.dm.removeFile(self.tempRemoteFile)
+ self.assertFalse(self.dm.fileExists(self.tempRemoteFile))
+
+ if self.dm.fileExists(self.tempRemoteSystemFile):
+ self.dm.removeFile(self.tempRemoteSystemFile)
+
+ self.assertTrue(self.dm.dirExists(self.tempRemoteDir))
+
+ @classmethod
+ def setUpClass(self):
+ self.dm = DeviceManagerADB()
+ if not os.path.exists(self.tempLocalDir):
+ os.mkdir(self.tempLocalDir)
+ if not os.path.exists(self.tempLocalFile):
+ # Create empty file
+ open(self.tempLocalFile, 'w').close()
+ self.tempRemoteDir = self.dm.getTempDir()
+ self.tempRemoteFile = os.path.join(self.tempRemoteDir,
+ os.path.basename(self.tempLocalFile))
+ self.tempRemoteSystemFile = \
+ os.path.join("/system", os.path.basename(self.tempLocalFile))
+
+ @classmethod
+ def tearDownClass(self):
+ os.remove(self.tempLocalFile)
+ os.rmdir(self.tempLocalDir)
+ if self.dm.dirExists(self.tempRemoteDir):
+ # self.tempRemoteFile will get deleted with it
+ self.dm.removeDir(self.tempRemoteDir)
+ if self.dm.fileExists(self.tempRemoteSystemFile):
+ self.dm.removeFile(self.tempRemoteSystemFile)
+
+
+class TestFileOperations(DeviceManagerADBTestCase):
+
+ def test_make_and_remove_directory(self):
+ dir1 = os.path.join(self.tempRemoteDir, "dir1")
+ self.assertFalse(self.dm.dirExists(dir1))
+ self.dm.mkDir(dir1)
+ self.assertTrue(self.dm.dirExists(dir1))
+ self.dm.removeDir(dir1)
+ self.assertFalse(self.dm.dirExists(dir1))
+
+ def test_push_and_remove_file(self):
+ self.dm.pushFile(self.tempLocalFile, self.tempRemoteFile)
+ self.assertTrue(self.dm.fileExists(self.tempRemoteFile))
+ self.dm.removeFile(self.tempRemoteFile)
+ self.assertFalse(self.dm.fileExists(self.tempRemoteFile))
+
+ def test_push_and_pull_file(self):
+ self.dm.pushFile(self.tempLocalFile, self.tempRemoteFile)
+ self.assertTrue(self.dm.fileExists(self.tempRemoteFile))
+ self.assertFalse(os.path.exists("pulled.txt"))
+ self.dm.getFile(self.tempRemoteFile, "pulled.txt")
+ self.assertTrue(os.path.exists("pulled.txt"))
+ os.remove("pulled.txt")
+
+ def test_push_and_pull_directory_and_list_files(self):
+ self.dm.removeDir(self.tempRemoteDir)
+ self.assertFalse(self.dm.dirExists(self.tempRemoteDir))
+ self.dm.pushDir(self.tempLocalDir, self.tempRemoteDir)
+ self.assertTrue(self.dm.dirExists(self.tempRemoteDir))
+ response = self.dm.listFiles(self.tempRemoteDir)
+ # The local dir that was pushed contains the tempLocalFile
+ self.assertIn(os.path.basename(self.tempLocalFile), response)
+ # Create a temp dir to pull to
+ temp_dir = tempfile.mkdtemp()
+ self.assertTrue(os.path.exists(temp_dir))
+ self.dm.getDirectory(self.tempRemoteDir, temp_dir)
+ self.assertTrue(os.path.exists(self.tempLocalFile))
+
+ def test_move_and_remove_directories(self):
+ dir1 = os.path.join(self.tempRemoteDir, "dir1")
+ dir2 = os.path.join(self.tempRemoteDir, "dir2")
+
+ self.assertFalse(self.dm.dirExists(dir1))
+ self.dm.mkDir(dir1)
+ self.assertTrue(self.dm.dirExists(dir1))
+
+ self.assertFalse(self.dm.dirExists(dir2))
+ self.dm.moveTree(dir1, dir2)
+ self.assertTrue(self.dm.dirExists(dir2))
+
+ self.dm.removeDir(dir1)
+ self.dm.removeDir(dir2)
+ self.assertFalse(self.dm.dirExists(dir1))
+ self.assertFalse(self.dm.dirExists(dir2))
+
+ def test_push_and_remove_system_file(self):
+ out = StringIO()
+ self.assertTrue(find_mount_permissions(self.dm, "/system") == "ro")
+ self.assertFalse(self.dm.fileExists(self.tempRemoteSystemFile))
+ self.assertRaises(DMError, self.dm.pushFile, self.tempLocalFile, self.tempRemoteSystemFile)
+ self.dm.shell(['mount', '-w', '-o', 'remount', '/system'], out)
+ self.assertTrue(find_mount_permissions(self.dm, "/system") == "rw")
+ self.assertFalse(self.dm.fileExists(self.tempRemoteSystemFile))
+ self.dm.pushFile(self.tempLocalFile, self.tempRemoteSystemFile)
+ self.assertTrue(self.dm.fileExists(self.tempRemoteSystemFile))
+ self.dm.removeFile(self.tempRemoteSystemFile)
+ self.assertFalse(self.dm.fileExists(self.tempRemoteSystemFile))
+ self.dm.shell(['mount', '-r', '-o', 'remount', '/system'], out)
+ out.close()
+ self.assertTrue(find_mount_permissions(self.dm, "/system") == "ro")
+
+
+class TestOther(DeviceManagerADBTestCase):
+
+ def test_get_list_of_processes(self):
+ self.assertEquals(type(self.dm.getProcessList()), list)
+
+ def test_get_current_time(self):
+ self.assertEquals(type(self.dm.getCurrentTime()), int)
+
+ def test_get_info(self):
+ self.assertEquals(type(self.dm.getInfo()), dict)
+
+ def test_list_devices(self):
+ self.assertEquals(len(list(self.dm.devices())), 1)
+
+ def test_shell(self):
+ out = StringIO()
+ self.dm.shell(["echo", "$COMPANY", ";", "pwd"], out,
+ env={"COMPANY": "Mozilla"}, cwd="/", timeout=4, root=True)
+ output = str(out.getvalue()).rstrip().splitlines()
+ out.close()
+ self.assertEquals(output, ['Mozilla', '/'])
+
+ def test_port_forwarding(self):
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.bind(("", 0))
+ port = s.getsockname()[1]
+ s.close()
+ # If successful then no exception is raised
+ self.dm.forward("tcp:%s" % port, "tcp:2828")
+
+ def test_port_forwarding_error(self):
+ self.assertRaises(DMError, self.dm.forward, "", "")
+
+
+if __name__ == '__main__':
+ dm = DeviceManagerADB()
+ if not dm.devices():
+ print "There are no connected adb devices"
+ sys.exit(1)
+
+ if find_mount_permissions(dm, "/system") == "rw":
+ print "We've found out that /system is mounted as 'rw'. This is because the command " \
+ "'adb remount' has been run before running this test case. Please reboot the device " \
+ "and try again."
+ sys.exit(1)
+
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/mozdevice/Zeroconf.py b/testing/mozbase/mozdevice/mozdevice/Zeroconf.py
new file mode 100755
index 000000000..54a5d5359
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/Zeroconf.py
@@ -0,0 +1,1560 @@
+""" Multicast DNS Service Discovery for Python, v0.12
+ Copyright (C) 2003, Paul Scott-Murphy
+
+ This module provides a framework for the use of DNS Service Discovery
+ using IP multicast. It has been tested against the JRendezvous
+ implementation from <a href="http://strangeberry.com">StrangeBerry</a>,
+ and against the mDNSResponder from Mac OS X 10.3.8.
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+"""
+
+"""0.12 update - allow selection of binding interface
+ typo fix - Thanks A. M. Kuchlingi
+ removed all use of word 'Rendezvous' - this is an API change"""
+
+"""0.11 update - correction to comments for addListener method
+ support for new record types seen from OS X
+ - IPv6 address
+ - hostinfo
+ ignore unknown DNS record types
+ fixes to name decoding
+ works alongside other processes using port 5353 (e.g. on Mac OS X)
+ tested against Mac OS X 10.3.2's mDNSResponder
+ corrections to removal of list entries for service browser"""
+
+"""0.10 update - Jonathon Paisley contributed these corrections:
+ always multicast replies, even when query is unicast
+ correct a pointer encoding problem
+ can now write records in any order
+ traceback shown on failure
+ better TXT record parsing
+ server is now separate from name
+ can cancel a service browser
+
+ modified some unit tests to accommodate these changes"""
+
+"""0.09 update - remove all records on service unregistration
+ fix DOS security problem with readName"""
+
+"""0.08 update - changed licensing to LGPL"""
+
+"""0.07 update - faster shutdown on engine
+ pointer encoding of outgoing names
+ ServiceBrowser now works
+ new unit tests"""
+
+"""0.06 update - small improvements with unit tests
+ added defined exception types
+ new style objects
+ fixed hostname/interface problem
+ fixed socket timeout problem
+ fixed addServiceListener() typo bug
+ using select() for socket reads
+ tested on Debian unstable with Python 2.2.2"""
+
+"""0.05 update - ensure case insensitivty on domain names
+ support for unicast DNS queries"""
+
+"""0.04 update - added some unit tests
+ added __ne__ adjuncts where required
+ ensure names end in '.local.'
+ timeout on receiving socket for clean shutdown"""
+
+__author__ = "Paul Scott-Murphy"
+__email__ = "paul at scott dash murphy dot com"
+__version__ = "0.12"
+
+import string
+import time
+import struct
+import socket
+import threading
+import select
+import traceback
+
+__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
+
+# hook for threads
+
+globals()['_GLOBAL_DONE'] = 0
+
+# Some timing constants
+
+_UNREGISTER_TIME = 125
+_CHECK_TIME = 175
+_REGISTER_TIME = 225
+_LISTENER_TIME = 200
+_BROWSER_TIME = 500
+
+# Some DNS constants
+
+_MDNS_ADDR = '224.0.0.251'
+_MDNS_PORT = 5353
+_DNS_PORT = 53
+_DNS_TTL = 60 * 60  # one hour default TTL
+
+_MAX_MSG_TYPICAL = 1460 # unused
+_MAX_MSG_ABSOLUTE = 8972
+
+_FLAGS_QR_MASK = 0x8000 # query response mask
+_FLAGS_QR_QUERY = 0x0000 # query
+_FLAGS_QR_RESPONSE = 0x8000 # response
+
+_FLAGS_AA = 0x0400 # Authoritative answer
+_FLAGS_TC = 0x0200 # Truncated
+_FLAGS_RD = 0x0100 # Recursion desired
+_FLAGS_RA = 0x8000 # Recursion available
+
+_FLAGS_Z = 0x0040 # Zero
+_FLAGS_AD = 0x0020 # Authentic data
+_FLAGS_CD = 0x0010 # Checking disabled
+
+_CLASS_IN = 1
+_CLASS_CS = 2
+_CLASS_CH = 3
+_CLASS_HS = 4
+_CLASS_NONE = 254
+_CLASS_ANY = 255
+_CLASS_MASK = 0x7FFF
+_CLASS_UNIQUE = 0x8000
+
+_TYPE_A = 1
+_TYPE_NS = 2
+_TYPE_MD = 3
+_TYPE_MF = 4
+_TYPE_CNAME = 5
+_TYPE_SOA = 6
+_TYPE_MB = 7
+_TYPE_MG = 8
+_TYPE_MR = 9
+_TYPE_NULL = 10
+_TYPE_WKS = 11
+_TYPE_PTR = 12
+_TYPE_HINFO = 13
+_TYPE_MINFO = 14
+_TYPE_MX = 15
+_TYPE_TXT = 16
+_TYPE_AAAA = 28
+_TYPE_SRV = 33
+_TYPE_ANY = 255
+
+# Mapping constants to names
+
+_CLASSES = { _CLASS_IN : "in",
+ _CLASS_CS : "cs",
+ _CLASS_CH : "ch",
+ _CLASS_HS : "hs",
+ _CLASS_NONE : "none",
+ _CLASS_ANY : "any" }
+
+_TYPES = { _TYPE_A : "a",
+ _TYPE_NS : "ns",
+ _TYPE_MD : "md",
+ _TYPE_MF : "mf",
+ _TYPE_CNAME : "cname",
+ _TYPE_SOA : "soa",
+ _TYPE_MB : "mb",
+ _TYPE_MG : "mg",
+ _TYPE_MR : "mr",
+ _TYPE_NULL : "null",
+ _TYPE_WKS : "wks",
+ _TYPE_PTR : "ptr",
+ _TYPE_HINFO : "hinfo",
+ _TYPE_MINFO : "minfo",
+ _TYPE_MX : "mx",
+ _TYPE_TXT : "txt",
+ _TYPE_AAAA : "quada",
+ _TYPE_SRV : "srv",
+ _TYPE_ANY : "any" }
+
+# utility functions
+
+def currentTimeMillis():
+ """Current system time in milliseconds"""
+ return time.time() * 1000
+
+# Exceptions
+
+class NonLocalNameException(Exception):
+ pass
+
+class NonUniqueNameException(Exception):
+ pass
+
+class NamePartTooLongException(Exception):
+ pass
+
+class AbstractMethodException(Exception):
+ pass
+
+class BadTypeInNameException(Exception):
+ pass
+
+# implementation classes
+
+class DNSEntry(object):
+ """A DNS entry"""
+
+ def __init__(self, name, type, clazz):
+ self.key = string.lower(name)
+ self.name = name
+ self.type = type
+ self.clazz = clazz & _CLASS_MASK
+ self.unique = (clazz & _CLASS_UNIQUE) != 0
+
+ def __eq__(self, other):
+ """Equality test on name, type, and class"""
+ if isinstance(other, DNSEntry):
+ return self.name == other.name and self.type == other.type and self.clazz == other.clazz
+ return 0
+
+ def __ne__(self, other):
+ """Non-equality test"""
+ return not self.__eq__(other)
+
+ def getClazz(self, clazz):
+ """Class accessor"""
+ try:
+ return _CLASSES[clazz]
+ except:
+ return "?(%s)" % (clazz)
+
+ def getType(self, type):
+ """Type accessor"""
+ try:
+ return _TYPES[type]
+ except:
+ return "?(%s)" % (type)
+
+ def toString(self, hdr, other):
+ """String representation with additional information"""
+ result = "%s[%s,%s" % (hdr, self.getType(self.type), self.getClazz(self.clazz))
+ if self.unique:
+ result += "-unique,"
+ else:
+ result += ","
+ result += self.name
+ if other is not None:
+ result += ",%s]" % (other)
+ else:
+ result += "]"
+ return result
+
+class DNSQuestion(DNSEntry):
+ """A DNS question entry"""
+
+ def __init__(self, name, type, clazz):
+ if not name.endswith(".local."):
+ raise NonLocalNameException
+ DNSEntry.__init__(self, name, type, clazz)
+
+ def answeredBy(self, rec):
+ """Returns true if the question is answered by the record"""
+ return self.clazz == rec.clazz and (self.type == rec.type or self.type == _TYPE_ANY) and self.name == rec.name
+
+ def __repr__(self):
+ """String representation"""
+ return DNSEntry.toString(self, "question", None)
+
+
+class DNSRecord(DNSEntry):
+ """A DNS record - like a DNS entry, but has a TTL"""
+
+ def __init__(self, name, type, clazz, ttl):
+ DNSEntry.__init__(self, name, type, clazz)
+ self.ttl = ttl
+ self.created = currentTimeMillis()
+
+ def __eq__(self, other):
+ """Tests equality as per DNSRecord"""
+ if isinstance(other, DNSRecord):
+ return DNSEntry.__eq__(self, other)
+ return 0
+
+ def suppressedBy(self, msg):
+ """Returns true if any answer in a message can suffice for the
+ information held in this record."""
+ for record in msg.answers:
+ if self.suppressedByAnswer(record):
+ return 1
+ return 0
+
+ def suppressedByAnswer(self, other):
+ """Returns true if another record has same name, type and class,
+ and if its TTL is at least half of this record's."""
+ if self == other and other.ttl > (self.ttl / 2):
+ return 1
+ return 0
+
+ def getExpirationTime(self, percent):
+ """Returns the time at which this record will have expired
+ by a certain percentage."""
+ return self.created + (percent * self.ttl * 10)
+
+ def getRemainingTTL(self, now):
+ """Returns the remaining TTL in seconds."""
+ return max(0, (self.getExpirationTime(100) - now) / 1000)
+
+ def isExpired(self, now):
+ """Returns true if this record has expired."""
+ return self.getExpirationTime(100) <= now
+
+ def isStale(self, now):
+ """Returns true if this record is at least half way expired."""
+ return self.getExpirationTime(50) <= now
+
+ def resetTTL(self, other):
+ """Sets this record's TTL and created time to that of
+ another record."""
+ self.created = other.created
+ self.ttl = other.ttl
+
+ def write(self, out):
+ """Abstract method"""
+ raise AbstractMethodException
+
+ def toString(self, other):
+ """String representation with addtional information"""
+ arg = "%s/%s,%s" % (self.ttl, self.getRemainingTTL(currentTimeMillis()), other)
+ return DNSEntry.toString(self, "record", arg)
+
+class DNSAddress(DNSRecord):
+ """A DNS address record"""
+
+ def __init__(self, name, type, clazz, ttl, address):
+ DNSRecord.__init__(self, name, type, clazz, ttl)
+ self.address = address
+
+ def write(self, out):
+ """Used in constructing an outgoing packet"""
+ out.writeString(self.address, len(self.address))
+
+ def __eq__(self, other):
+ """Tests equality on address"""
+ if isinstance(other, DNSAddress):
+ return self.address == other.address
+ return 0
+
+ def __repr__(self):
+ """String representation"""
+ try:
+ return socket.inet_ntoa(self.address)
+ except:
+ return self.address
+
+class DNSHinfo(DNSRecord):
+ """A DNS host information record"""
+
+ def __init__(self, name, type, clazz, ttl, cpu, os):
+ DNSRecord.__init__(self, name, type, clazz, ttl)
+ self.cpu = cpu
+ self.os = os
+
+ def write(self, out):
+ """Used in constructing an outgoing packet"""
+ out.writeString(self.cpu, len(self.cpu))
+ out.writeString(self.os, len(self.os))
+
+ def __eq__(self, other):
+ """Tests equality on cpu and os"""
+ if isinstance(other, DNSHinfo):
+ return self.cpu == other.cpu and self.os == other.os
+ return 0
+
+ def __repr__(self):
+ """String representation"""
+ return self.cpu + " " + self.os
+
+class DNSPointer(DNSRecord):
+ """A DNS pointer record"""
+
+ def __init__(self, name, type, clazz, ttl, alias):
+ DNSRecord.__init__(self, name, type, clazz, ttl)
+ self.alias = alias
+
+ def write(self, out):
+ """Used in constructing an outgoing packet"""
+ out.writeName(self.alias)
+
+ def __eq__(self, other):
+ """Tests equality on alias"""
+ if isinstance(other, DNSPointer):
+ return self.alias == other.alias
+ return 0
+
+ def __repr__(self):
+ """String representation"""
+ return self.toString(self.alias)
+
+class DNSText(DNSRecord):
+ """A DNS text record"""
+
+ def __init__(self, name, type, clazz, ttl, text):
+ DNSRecord.__init__(self, name, type, clazz, ttl)
+ self.text = text
+
+ def write(self, out):
+ """Used in constructing an outgoing packet"""
+ out.writeString(self.text, len(self.text))
+
+ def __eq__(self, other):
+ """Tests equality on text"""
+ if isinstance(other, DNSText):
+ return self.text == other.text
+ return 0
+
+ def __repr__(self):
+ """String representation"""
+ if len(self.text) > 10:
+ return self.toString(self.text[:7] + "...")
+ else:
+ return self.toString(self.text)
+
+class DNSService(DNSRecord):
+ """A DNS service record"""
+
+ def __init__(self, name, type, clazz, ttl, priority, weight, port, server):
+ DNSRecord.__init__(self, name, type, clazz, ttl)
+ self.priority = priority
+ self.weight = weight
+ self.port = port
+ self.server = server
+
+ def write(self, out):
+ """Used in constructing an outgoing packet"""
+ out.writeShort(self.priority)
+ out.writeShort(self.weight)
+ out.writeShort(self.port)
+ out.writeName(self.server)
+
+ def __eq__(self, other):
+ """Tests equality on priority, weight, port and server"""
+ if isinstance(other, DNSService):
+ return self.priority == other.priority and self.weight == other.weight and self.port == other.port and self.server == other.server
+ return 0
+
+ def __repr__(self):
+ """String representation"""
+ return self.toString("%s:%s" % (self.server, self.port))
+
+class DNSIncoming(object):
+ """Object representation of an incoming DNS packet"""
+
+ def __init__(self, data):
+ """Constructor from string holding bytes of packet"""
+ self.offset = 0
+ self.data = data
+ self.questions = []
+ self.answers = []
+ self.numQuestions = 0
+ self.numAnswers = 0
+ self.numAuthorities = 0
+ self.numAdditionals = 0
+
+ self.readHeader()
+ self.readQuestions()
+ self.readOthers()
+
+ def readHeader(self):
+ """Reads header portion of packet"""
+ format = '!HHHHHH'
+ length = struct.calcsize(format)
+ info = struct.unpack(format, self.data[self.offset:self.offset+length])
+ self.offset += length
+
+ self.id = info[0]
+ self.flags = info[1]
+ self.numQuestions = info[2]
+ self.numAnswers = info[3]
+ self.numAuthorities = info[4]
+ self.numAdditionals = info[5]
+
+ def readQuestions(self):
+ """Reads questions section of packet"""
+ format = '!HH'
+ length = struct.calcsize(format)
+ for i in range(0, self.numQuestions):
+ name = self.readName()
+ info = struct.unpack(format, self.data[self.offset:self.offset+length])
+ self.offset += length
+
+ question = DNSQuestion(name, info[0], info[1])
+ self.questions.append(question)
+
+ def readInt(self):
+ """Reads an integer from the packet"""
+ format = '!I'
+ length = struct.calcsize(format)
+ info = struct.unpack(format, self.data[self.offset:self.offset+length])
+ self.offset += length
+ return info[0]
+
+ def readCharacterString(self):
+ """Reads a character string from the packet"""
+ length = ord(self.data[self.offset])
+ self.offset += 1
+ return self.readString(length)
+
+ def readString(self, len):
+ """Reads a string of a given length from the packet"""
+ format = '!' + str(len) + 's'
+ length = struct.calcsize(format)
+ info = struct.unpack(format, self.data[self.offset:self.offset+length])
+ self.offset += length
+ return info[0]
+
+ def readUnsignedShort(self):
+ """Reads an unsigned short from the packet"""
+ format = '!H'
+ length = struct.calcsize(format)
+ info = struct.unpack(format, self.data[self.offset:self.offset+length])
+ self.offset += length
+ return info[0]
+
+ def readOthers(self):
+ """Reads the answers, authorities and additionals section of the packet"""
+ format = '!HHiH'
+ length = struct.calcsize(format)
+ n = self.numAnswers + self.numAuthorities + self.numAdditionals
+ for i in range(0, n):
+ domain = self.readName()
+ info = struct.unpack(format, self.data[self.offset:self.offset+length])
+ self.offset += length
+
+ rec = None
+ if info[0] == _TYPE_A:
+ rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(4))
+ elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
+ rec = DNSPointer(domain, info[0], info[1], info[2], self.readName())
+ elif info[0] == _TYPE_TXT:
+ rec = DNSText(domain, info[0], info[1], info[2], self.readString(info[3]))
+ elif info[0] == _TYPE_SRV:
+ rec = DNSService(domain, info[0], info[1], info[2], self.readUnsignedShort(), self.readUnsignedShort(), self.readUnsignedShort(), self.readName())
+ elif info[0] == _TYPE_HINFO:
+ rec = DNSHinfo(domain, info[0], info[1], info[2], self.readCharacterString(), self.readCharacterString())
+ elif info[0] == _TYPE_AAAA:
+ rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(16))
+ else:
+ # Try to ignore types we don't know about
+ # this may mean the rest of the name is
+ # unable to be parsed, and may show errors
+ # so this is left for debugging. New types
+ # encountered need to be parsed properly.
+ #
+ #print "UNKNOWN TYPE = " + str(info[0])
+ #raise BadTypeInNameException
+ pass
+
+ if rec is not None:
+ self.answers.append(rec)
+
+ def isQuery(self):
+ """Returns true if this is a query"""
+ return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY
+
+ def isResponse(self):
+ """Returns true if this is a response"""
+ return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE
+
+ def readUTF(self, offset, len):
+ """Reads a UTF-8 string of a given length from the packet"""
+ result = self.data[offset:offset+len].decode('utf-8')
+ return result
+
+ def readName(self):
+ """Reads a domain name from the packet"""
+ result = ''
+ off = self.offset
+ next = -1
+ first = off
+
+ while 1:
+ len = ord(self.data[off])
+ off += 1
+ if len == 0:
+ break
+ t = len & 0xC0
+ if t == 0x00:
+ result = ''.join((result, self.readUTF(off, len) + '.'))
+ off += len
+ elif t == 0xC0:
+ if next < 0:
+ next = off + 1
+ off = ((len & 0x3F) << 8) | ord(self.data[off])
+ if off >= first:
+ raise "Bad domain name (circular) at " + str(off)
+ first = off
+ else:
+ raise "Bad domain name at " + str(off)
+
+ if next >= 0:
+ self.offset = next
+ else:
+ self.offset = off
+
+ return result
+
+
+class DNSOutgoing(object):
+ """Object representation of an outgoing packet"""
+
+ def __init__(self, flags, multicast = 1):
+ self.finished = 0
+ self.id = 0
+ self.multicast = multicast
+ self.flags = flags
+ self.names = {}
+ self.data = []
+ self.size = 12
+
+ self.questions = []
+ self.answers = []
+ self.authorities = []
+ self.additionals = []
+
+ def addQuestion(self, record):
+ """Adds a question"""
+ self.questions.append(record)
+
+ def addAnswer(self, inp, record):
+ """Adds an answer"""
+ if not record.suppressedBy(inp):
+ self.addAnswerAtTime(record, 0)
+
+ def addAnswerAtTime(self, record, now):
+ """Adds an answer if if does not expire by a certain time"""
+ if record is not None:
+ if now == 0 or not record.isExpired(now):
+ self.answers.append((record, now))
+
+ def addAuthorativeAnswer(self, record):
+ """Adds an authoritative answer"""
+ self.authorities.append(record)
+
+ def addAdditionalAnswer(self, record):
+ """Adds an additional answer"""
+ self.additionals.append(record)
+
+ def writeByte(self, value):
+ """Writes a single byte to the packet"""
+ format = '!c'
+ self.data.append(struct.pack(format, chr(value)))
+ self.size += 1
+
+ def insertShort(self, index, value):
+ """Inserts an unsigned short in a certain position in the packet"""
+ format = '!H'
+ self.data.insert(index, struct.pack(format, value))
+ self.size += 2
+
+ def writeShort(self, value):
+ """Writes an unsigned short to the packet"""
+ format = '!H'
+ self.data.append(struct.pack(format, value))
+ self.size += 2
+
+ def writeInt(self, value):
+ """Writes an unsigned integer to the packet"""
+ format = '!I'
+ self.data.append(struct.pack(format, value))
+ self.size += 4
+
+ def writeString(self, value, length):
+ """Writes a string to the packet"""
+ format = '!' + str(length) + 's'
+ self.data.append(struct.pack(format, value))
+ self.size += length
+
+ def writeUTF(self, s):
+ """Writes a UTF-8 string of a given length to the packet"""
+ utfstr = s.encode('utf-8')
+ length = len(utfstr)
+ if length > 64:
+ raise NamePartTooLongException
+ self.writeByte(length)
+ self.writeString(utfstr, length)
+
+ def writeName(self, name):
+ """Writes a domain name to the packet"""
+
+ try:
+ # Find existing instance of this name in packet
+ #
+ index = self.names[name]
+ except KeyError:
+ # No record of this name already, so write it
+ # out as normal, recording the location of the name
+ # for future pointers to it.
+ #
+ self.names[name] = self.size
+ parts = name.split('.')
+ if parts[-1] == '':
+ parts = parts[:-1]
+ for part in parts:
+ self.writeUTF(part)
+ self.writeByte(0)
+ return
+
+ # An index was found, so write a pointer to it
+ #
+ self.writeByte((index >> 8) | 0xC0)
+ self.writeByte(index)
+
+ def writeQuestion(self, question):
+ """Writes a question to the packet"""
+ self.writeName(question.name)
+ self.writeShort(question.type)
+ self.writeShort(question.clazz)
+
+ def writeRecord(self, record, now):
+ """Writes a record (answer, authoritative answer, additional) to
+ the packet"""
+ self.writeName(record.name)
+ self.writeShort(record.type)
+ if record.unique and self.multicast:
+ self.writeShort(record.clazz | _CLASS_UNIQUE)
+ else:
+ self.writeShort(record.clazz)
+ if now == 0:
+ self.writeInt(record.ttl)
+ else:
+ self.writeInt(record.getRemainingTTL(now))
+ index = len(self.data)
+ # Adjust size for the short we will write before this record
+ #
+ self.size += 2
+ record.write(self)
+ self.size -= 2
+
+ length = len(''.join(self.data[index:]))
+ self.insertShort(index, length) # Here is the short we adjusted for
+
+ def packet(self):
+ """Returns a string containing the packet's bytes
+
+ No further parts should be added to the packet once this
+ is done."""
+ if not self.finished:
+ self.finished = 1
+ for question in self.questions:
+ self.writeQuestion(question)
+ for answer, time in self.answers:
+ self.writeRecord(answer, time)
+ for authority in self.authorities:
+ self.writeRecord(authority, 0)
+ for additional in self.additionals:
+ self.writeRecord(additional, 0)
+
+ self.insertShort(0, len(self.additionals))
+ self.insertShort(0, len(self.authorities))
+ self.insertShort(0, len(self.answers))
+ self.insertShort(0, len(self.questions))
+ self.insertShort(0, self.flags)
+ if self.multicast:
+ self.insertShort(0, 0)
+ else:
+ self.insertShort(0, self.id)
+ return ''.join(self.data)
+
+
+class DNSCache(object):
+ """A cache of DNS entries"""
+
+ def __init__(self):
+ self.cache = {}
+
+ def add(self, entry):
+ """Adds an entry"""
+ try:
+ list = self.cache[entry.key]
+ except:
+ list = self.cache[entry.key] = []
+ list.append(entry)
+
+ def remove(self, entry):
+ """Removes an entry"""
+ try:
+ list = self.cache[entry.key]
+ list.remove(entry)
+ except:
+ pass
+
+ def get(self, entry):
+ """Gets an entry by key. Will return None if there is no
+ matching entry."""
+ try:
+ list = self.cache[entry.key]
+ return list[list.index(entry)]
+ except:
+ return None
+
+ def getByDetails(self, name, type, clazz):
+ """Gets an entry by details. Will return None if there is
+ no matching entry."""
+ entry = DNSEntry(name, type, clazz)
+ return self.get(entry)
+
+ def entriesWithName(self, name):
+ """Returns a list of entries whose key matches the name."""
+ try:
+ return self.cache[name]
+ except:
+ return []
+
+ def entries(self):
+ """Returns a list of all entries"""
+ def add(x, y): return x+y
+ try:
+ return reduce(add, self.cache.values())
+ except:
+ return []
+
+
+class Engine(threading.Thread):
+ """An engine wraps read access to sockets, allowing objects that
+ need to receive data from sockets to be called back when the
+ sockets are ready.
+
+ A reader needs a handle_read() method, which is called when the socket
+ it is interested in is ready for reading.
+
+ Writers are not implemented here, because we only send short
+ packets.
+ """
+
+ def __init__(self, zeroconf):
+ threading.Thread.__init__(self)
+ self.zeroconf = zeroconf
+ self.readers = {} # maps socket to reader
+ self.timeout = 5
+ self.condition = threading.Condition()
+ self.daemon = True
+ self.start()
+
+ def run(self):
+ while not globals()['_GLOBAL_DONE']:
+ rs = self.getReaders()
+ if len(rs) == 0:
+ # No sockets to manage, but we wait for the timeout
+ # or addition of a socket
+ #
+ self.condition.acquire()
+ self.condition.wait(self.timeout)
+ self.condition.release()
+ else:
+ try:
+ rr, wr, er = select.select(rs, [], [], self.timeout)
+ for socket in rr:
+ try:
+ self.readers[socket].handle_read()
+ except:
+ # Ignore errors that occur on shutdown
+ pass
+ except:
+ pass
+
+ def getReaders(self):
+ result = []
+ self.condition.acquire()
+ result = self.readers.keys()
+ self.condition.release()
+ return result
+
+ def addReader(self, reader, socket):
+ self.condition.acquire()
+ self.readers[socket] = reader
+ self.condition.notify()
+ self.condition.release()
+
+ def delReader(self, socket):
+ self.condition.acquire()
+ del(self.readers[socket])
+ self.condition.notify()
+ self.condition.release()
+
+ def notify(self):
+ self.condition.acquire()
+ self.condition.notify()
+ self.condition.release()
+
+class Listener(object):
+ """A Listener is used by this module to listen on the multicast
+ group to which DNS messages are sent, allowing the implementation
+ to cache information as it arrives.
+
+ It requires registration with an Engine object in order to have
+ its handle_read() method called when a socket is available for reading."""
+
+ def __init__(self, zeroconf):
+ self.zeroconf = zeroconf
+ self.zeroconf.engine.addReader(self, self.zeroconf.socket)
+
+ def handle_read(self):
+ data, (addr, port) = self.zeroconf.socket.recvfrom(_MAX_MSG_ABSOLUTE)
+ self.data = data
+ msg = DNSIncoming(data)
+ if msg.isQuery():
+ # Always multicast responses
+ #
+ if port == _MDNS_PORT:
+ self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
+ # If it's not a multicast query, reply via unicast
+ # and multicast
+ #
+ elif port == _DNS_PORT:
+ self.zeroconf.handleQuery(msg, addr, port)
+ self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
+ else:
+ self.zeroconf.handleResponse(msg)
+
+
+class Reaper(threading.Thread):
+ """A Reaper is used by this module to remove cache entries that
+ have expired."""
+
+ def __init__(self, zeroconf):
+ threading.Thread.__init__(self)
+ self.daemon = True
+ self.zeroconf = zeroconf
+ self.start()
+
+ def run(self):
+ while 1:
+ self.zeroconf.wait(10 * 1000)
+ if globals()['_GLOBAL_DONE']:
+ return
+ now = currentTimeMillis()
+ for record in self.zeroconf.cache.entries():
+ if record.isExpired(now):
+ self.zeroconf.updateRecord(now, record)
+ self.zeroconf.cache.remove(record)
+
+
+class ServiceBrowser(threading.Thread):
+ """Used to browse for a service of a specific type.
+
+ The listener object will have its addService() and
+ removeService() methods called when this browser
+ discovers changes in the availability of services."""
+
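+ # A hedged listener sketch; the class name MyListener and the printed text
+ # are illustrative assumptions, only the addService()/removeService()
+ # signatures matter:
+ #
+ #   class MyListener(object):
+ #       def addService(self, zeroconf, type, name):
+ #           print "service added:", zeroconf.getServiceInfo(type, name)
+ #
+ #       def removeService(self, zeroconf, type, name):
+ #           print "service removed:", name
+ #
+ #   browser = ServiceBrowser(Zeroconf(), "_http._tcp.local.", MyListener())
+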
+ def __init__(self, zeroconf, type, listener):
+ """Creates a browser for a specific type"""
+ threading.Thread.__init__(self)
+ self.zeroconf = zeroconf
+ self.type = type
+ self.listener = listener
+ self.services = {}
+ self.nextTime = currentTimeMillis()
+ self.delay = _BROWSER_TIME
+ self.list = []
+ self.daemon = True
+
+ self.done = 0
+
+ self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
+ self.start()
+
+ def updateRecord(self, zeroconf, now, record):
+ """Callback invoked by Zeroconf when new information arrives.
+
+ Updates information required by browser in the Zeroconf cache."""
+ if record.type == _TYPE_PTR and record.name == self.type:
+ expired = record.isExpired(now)
+ try:
+ oldrecord = self.services[record.alias.lower()]
+ if not expired:
+ oldrecord.resetTTL(record)
+ else:
+ del(self.services[record.alias.lower()])
+ callback = lambda x: self.listener.removeService(x, self.type, record.alias)
+ self.list.append(callback)
+ return
+ except:
+ if not expired:
+ self.services[record.alias.lower()] = record
+ callback = lambda x: self.listener.addService(x, self.type, record.alias)
+ self.list.append(callback)
+
+ expires = record.getExpirationTime(75)
+ if expires < self.nextTime:
+ self.nextTime = expires
+
+ def cancel(self):
+ self.done = 1
+ self.zeroconf.notifyAll()
+
+ def run(self):
+ while 1:
+ event = None
+ now = currentTimeMillis()
+ if len(self.list) == 0 and self.nextTime > now:
+ self.zeroconf.wait(self.nextTime - now)
+ if globals()['_GLOBAL_DONE'] or self.done:
+ return
+ now = currentTimeMillis()
+
+ if self.nextTime <= now:
+ out = DNSOutgoing(_FLAGS_QR_QUERY)
+ out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
+ for record in self.services.values():
+ if not record.isExpired(now):
+ out.addAnswerAtTime(record, now)
+ self.zeroconf.send(out)
+ self.nextTime = now + self.delay
+ self.delay = min(20 * 1000, self.delay * 2)
+
+ if len(self.list) > 0:
+ event = self.list.pop(0)
+
+ if event is not None:
+ event(self.zeroconf)
+
+
+class ServiceInfo(object):
+ """Service information"""
+
+ def __init__(self, type, name, address=None, port=None, weight=0, priority=0, properties=None, server=None):
+ """Create a service description.
+
+ type: fully qualified service type name
+ name: fully qualified service name
+ address: IP address as a packed binary string, network byte order
+ port: port that the service runs on
+ weight: weight of the service
+ priority: priority of the service
+ properties: dictionary of properties (or a string holding the bytes for the text field)
+ server: fully qualified name for service host (defaults to name)"""
+
+ if not name.endswith(type):
+ raise BadTypeInNameException
+ self.type = type
+ self.name = name
+ self.address = address
+ self.port = port
+ self.weight = weight
+ self.priority = priority
+ if server:
+ self.server = server
+ else:
+ self.server = name
+ self.setProperties(properties)
+
+ def setProperties(self, properties):
+ """Sets properties and text of this info from a dictionary"""
+ if isinstance(properties, dict):
+ self.properties = properties
+ list = []
+ result = ''
+ for key in properties:
+ value = properties[key]
+ if value is None:
+ suffix = ''.encode('utf-8')
+ elif isinstance(value, str):
+ suffix = value.encode('utf-8')
+ elif isinstance(value, int):
+ if value:
+ suffix = 'true'
+ else:
+ suffix = 'false'
+ else:
+ suffix = ''.encode('utf-8')
+ list.append('='.join((key, suffix)))
+ for item in list:
+ result = ''.join((result, struct.pack('!c', chr(len(item))), item))
+ self.text = result
+ else:
+ self.text = properties
+
+ def setText(self, text):
+ """Sets properties and text given a text field"""
+ self.text = text
+ try:
+ result = {}
+ end = len(text)
+ index = 0
+ strs = []
+ while index < end:
+ length = ord(text[index])
+ index += 1
+ strs.append(text[index:index+length])
+ index += length
+
+ for s in strs:
+ eindex = s.find('=')
+ if eindex == -1:
+ # No equals sign at all
+ key = s
+ value = 0
+ else:
+ key = s[:eindex]
+ value = s[eindex+1:]
+ if value == 'true':
+ value = 1
+ elif value == 'false' or not value:
+ value = 0
+
+ # Only update non-existent properties
+ if key and result.get(key) == None:
+ result[key] = value
+
+ self.properties = result
+ except:
+ traceback.print_exc()
+ self.properties = None
+
+ def getType(self):
+ """Type accessor"""
+ return self.type
+
+ def getName(self):
+ """Name accessor"""
+ if self.type is not None and self.name.endswith("." + self.type):
+ return self.name[:len(self.name) - len(self.type) - 1]
+ return self.name
+
+ def getAddress(self):
+ """Address accessor"""
+ return self.address
+
+ def getPort(self):
+ """Port accessor"""
+ return self.port
+
+ def getPriority(self):
+ """Pirority accessor"""
+ return self.priority
+
+ def getWeight(self):
+ """Weight accessor"""
+ return self.weight
+
+ def getProperties(self):
+ """Properties accessor"""
+ return self.properties
+
+ def getText(self):
+ """Text accessor"""
+ return self.text
+
+ def getServer(self):
+ """Server accessor"""
+ return self.server
+
+ def updateRecord(self, zeroconf, now, record):
+ """Updates service information from a DNS record"""
+ if record is not None and not record.isExpired(now):
+ if record.type == _TYPE_A:
+ if record.name == self.name:
+ self.address = record.address
+ elif record.type == _TYPE_SRV:
+ if record.name == self.name:
+ self.server = record.server
+ self.port = record.port
+ self.weight = record.weight
+ self.priority = record.priority
+ self.address = None
+ self.updateRecord(zeroconf, now, zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN))
+ elif record.type == _TYPE_TXT:
+ if record.name == self.name:
+ self.setText(record.text)
+
+ def request(self, zeroconf, timeout):
+ """Returns true if the service could be discovered on the
+ network, and updates this object with details discovered.
+ """
+ now = currentTimeMillis()
+ delay = _LISTENER_TIME
+ next = now + delay
+ last = now + timeout
+ result = 0
+ try:
+ zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN))
+ while self.server is None or self.address is None or self.text is None:
+ if last <= now:
+ return 0
+ if next <= now:
+ out = DNSOutgoing(_FLAGS_QR_QUERY)
+ out.addQuestion(DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN))
+ out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_SRV, _CLASS_IN), now)
+ out.addQuestion(DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN))
+ out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_TXT, _CLASS_IN), now)
+ if self.server is not None:
+ out.addQuestion(DNSQuestion(self.server, _TYPE_A, _CLASS_IN))
+ out.addAnswerAtTime(zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN), now)
+ zeroconf.send(out)
+ next = now + delay
+ delay = delay * 2
+
+ zeroconf.wait(min(next, last) - now)
+ now = currentTimeMillis()
+ result = 1
+ finally:
+ zeroconf.removeListener(self)
+
+ return result
+
+ def __eq__(self, other):
+ """Tests equality of service name"""
+ if isinstance(other, ServiceInfo):
+ return other.name == self.name
+ return 0
+
+ def __ne__(self, other):
+ """Non-equality test"""
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ """String representation"""
+ result = "service[%s,%s:%s," % (self.name, socket.inet_ntoa(self.getAddress()), self.port)
+ if self.text is None:
+ result += "None"
+ else:
+ if len(self.text) < 20:
+ result += self.text
+ else:
+ result += self.text[:17] + "..."
+ result += "]"
+ return result
+
+
+class Zeroconf(object):
+ """Implementation of Zeroconf Multicast DNS Service Discovery
+
+ Supports registration, unregistration, queries and browsing.
+ """
+ def __init__(self, bindaddress=None):
+ """Creates an instance of the Zeroconf class, establishing
+ multicast communications, listening and reaping threads."""
+ globals()['_GLOBAL_DONE'] = 0
+ if bindaddress is None:
+ self.intf = socket.gethostbyname(socket.gethostname())
+ else:
+ self.intf = bindaddress
+ self.group = ('', _MDNS_PORT)
+ self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ try:
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+ except:
+ # SO_REUSEADDR should be equivalent to SO_REUSEPORT for
+ # multicast UDP sockets (p 731, "TCP/IP Illustrated,
+ # Volume 2"), but some BSD-derived systems require
+ # SO_REUSEPORT to be specified explicitly. Also, not all
+ # versions of Python have SO_REUSEPORT available. So
+ # if you're on a BSD-based system, and haven't upgraded
+ # to Python 2.3 yet, you may find this library doesn't
+ # work as expected.
+ #
+ pass
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, 255)
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, 1)
+ try:
+ self.socket.bind(self.group)
+ except:
+ # Some versions of linux raise an exception even though
+ # the SO_REUSE* options have been set, so ignore it
+ #
+ pass
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.intf) + socket.inet_aton('0.0.0.0'))
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
+
+ self.listeners = []
+ self.browsers = []
+ self.services = {}
+
+ self.cache = DNSCache()
+
+ self.condition = threading.Condition()
+
+ self.engine = Engine(self)
+ self.listener = Listener(self)
+ self.reaper = Reaper(self)
+
+ def isLoopback(self):
+ return self.intf.startswith("127.0.0.1")
+
+ def isLinklocal(self):
+ return self.intf.startswith("169.254.")
+
+ def wait(self, timeout):
+ """Calling thread waits for a given number of milliseconds or
+ until notified."""
+ self.condition.acquire()
+ self.condition.wait(timeout/1000)
+ self.condition.release()
+
+ def notifyAll(self):
+ """Notifies all waiting threads"""
+ self.condition.acquire()
+ self.condition.notifyAll()
+ self.condition.release()
+
+ def getServiceInfo(self, type, name, timeout=3000):
+ """Returns network's service information for a particular
+ name and type, or None if no service matches by the timeout,
+ which defaults to 3 seconds."""
+ info = ServiceInfo(type, name)
+ if info.request(self, timeout):
+ return info
+ return None
+
+ def addServiceListener(self, type, listener):
+ """Adds a listener for a particular service type. This object
+ will then have its updateRecord method called when information
+ arrives for that type."""
+ self.removeServiceListener(listener)
+ self.browsers.append(ServiceBrowser(self, type, listener))
+
+ def removeServiceListener(self, listener):
+ """Removes a listener from the set that is currently listening."""
+ for browser in self.browsers:
+ if browser.listener == listener:
+ browser.cancel()
+ del(browser)
+
+ def registerService(self, info, ttl=_DNS_TTL):
+ """Registers service information to the network with a default TTL
+ of 60 seconds. Zeroconf will then respond to requests for
+ information for that service. The name of the service may be
+ changed if needed to make it unique on the network."""
+ self.checkService(info)
+ self.services[info.name.lower()] = info
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0)
+ out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, ttl, info.priority, info.weight, info.port, info.server), 0)
+ out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0)
+ if info.address:
+ out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, ttl, info.address), 0)
+ self.send(out)
+ i += 1
+ nextTime += _REGISTER_TIME
+
+ def unregisterService(self, info):
+ """Unregister a service."""
+ try:
+ del(self.services[info.name.lower()])
+ except:
+ pass
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
+ out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.name), 0)
+ out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0)
+ if info.address:
+ out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0)
+ self.send(out)
+ i += 1
+ nextTime += _UNREGISTER_TIME
+
+ def unregisterAllServices(self):
+ """Unregister all registered services."""
+ if len(self.services) > 0:
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ for info in self.services.values():
+ out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
+ out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.server), 0)
+ out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0)
+ if info.address:
+ out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0)
+ self.send(out)
+ i += 1
+ nextTime += _UNREGISTER_TIME
+
+ def checkService(self, info):
+ """Checks the network for a unique service name, modifying the
+ ServiceInfo passed in if it is not unique."""
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ for record in self.cache.entriesWithName(info.type):
+ if record.type == _TYPE_PTR and not record.isExpired(now) and record.alias == info.name:
+ if (info.name.find('.') < 0):
+ info.name = info.name + ".[" + info.address + ":" + info.port + "]." + info.type
+ self.checkService(info)
+ return
+ raise NonUniqueNameException
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
+ self.debug = out
+ out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
+ out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name))
+ self.send(out)
+ i += 1
+ nextTime += _CHECK_TIME
+
+ def addListener(self, listener, question):
+ """Adds a listener for a given question. The listener will have
+ its updateRecord method called when information is available to
+ answer the question."""
+ now = currentTimeMillis()
+ self.listeners.append(listener)
+ if question is not None:
+ for record in self.cache.entriesWithName(question.name):
+ if question.answeredBy(record) and not record.isExpired(now):
+ listener.updateRecord(self, now, record)
+ self.notifyAll()
+
+ def removeListener(self, listener):
+ """Removes a listener."""
+ try:
+ self.listeners.remove(listener)
+ self.notifyAll()
+ except:
+ pass
+
+ def updateRecord(self, now, rec):
+ """Used to notify listeners of new information that has updated
+ a record."""
+ for listener in self.listeners:
+ listener.updateRecord(self, now, rec)
+ self.notifyAll()
+
+ def handleResponse(self, msg):
+ """Deal with incoming response packets. All answers
+ are held in the cache, and listeners are notified."""
+ now = currentTimeMillis()
+ for record in msg.answers:
+ expired = record.isExpired(now)
+ if record in self.cache.entries():
+ if expired:
+ self.cache.remove(record)
+ else:
+ entry = self.cache.get(record)
+ if entry is not None:
+ entry.resetTTL(record)
+ record = entry
+ else:
+ self.cache.add(record)
+
+ self.updateRecord(now, record)
+
+ def handleQuery(self, msg, addr, port):
+ """Deal with incoming query packets. Provides a response if
+ possible."""
+ out = None
+
+ # Support unicast client responses
+ #
+ if port != _MDNS_PORT:
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0)
+ for question in msg.questions:
+ out.addQuestion(question)
+
+ for question in msg.questions:
+ if question.type == _TYPE_PTR:
+ for service in self.services.values():
+ if question.name == service.type:
+ if out is None:
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, service.name))
+ else:
+ try:
+ if out is None:
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+
+ # Answer A record queries for any service addresses we know
+ if question.type == _TYPE_A or question.type == _TYPE_ANY:
+ for service in self.services.values():
+ if service.server == question.name.lower():
+ out.addAnswer(msg, DNSAddress(question.name, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address))
+
+ service = self.services.get(question.name.lower(), None)
+ if not service: continue
+
+ if question.type == _TYPE_SRV or question.type == _TYPE_ANY:
+ out.addAnswer(msg, DNSService(question.name, _TYPE_SRV, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.priority, service.weight, service.port, service.server))
+ if question.type == _TYPE_TXT or question.type == _TYPE_ANY:
+ out.addAnswer(msg, DNSText(question.name, _TYPE_TXT, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text))
+ if question.type == _TYPE_SRV:
+ out.addAdditionalAnswer(DNSAddress(service.server, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address))
+ except:
+ traceback.print_exc()
+
+ if out is not None and out.answers:
+ out.id = msg.id
+ self.send(out, addr, port)
+
+ def send(self, out, addr = _MDNS_ADDR, port = _MDNS_PORT):
+ """Sends an outgoing packet."""
+ # This is a quick test to see if we can parse the packets we generate
+ #temp = DNSIncoming(out.packet())
+ try:
+ bytes_sent = self.socket.sendto(out.packet(), 0, (addr, port))
+ except:
+ # Ignore this, it may be a temporary loss of network connection
+ pass
+
+ def close(self):
+ """Ends the background threads, and prevent this instance from
+ servicing further queries."""
+ if globals()['_GLOBAL_DONE'] == 0:
+ globals()['_GLOBAL_DONE'] = 1
+ self.notifyAll()
+ self.engine.notify()
+ self.unregisterAllServices()
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
+ self.socket.close()
+
+# Test a few module features, including service registration, service
+# query (for Zoe), and service unregistration.
+
+if __name__ == '__main__':
+ print "Multicast DNS Service Discovery for Python, version", __version__
+ r = Zeroconf()
+ print "1. Testing registration of a service..."
+ desc = {'version':'0.10','a':'test value', 'b':'another value'}
+ info = ServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.", socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc)
+ print " Registering service..."
+ r.registerService(info)
+ print " Registration done."
+ print "2. Testing query of service information..."
+ print " Getting ZOE service:", str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local."))
+ print " Query done."
+ print "3. Testing query of own service..."
+ print " Getting self:", str(r.getServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local."))
+ print " Query done."
+ print "4. Testing unregister of service information..."
+ r.unregisterService(info)
+ print " Unregister done."
+ r.close()
diff --git a/testing/mozbase/mozdevice/mozdevice/__init__.py b/testing/mozbase/mozdevice/mozdevice/__init__.py
new file mode 100644
index 000000000..2493f75db
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/__init__.py
@@ -0,0 +1,15 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from adb import ADBError, ADBRootError, ADBTimeoutError, ADBProcess, ADBCommand, ADBHost, ADBDevice
+from adb_android import ADBAndroid
+from adb_b2g import ADBB2G
+from devicemanager import DeviceManager, DMError, ZeroconfListener
+from devicemanagerADB import DeviceManagerADB
+from devicemanagerSUT import DeviceManagerSUT
+from droid import DroidADB, DroidSUT, DroidConnectByHWID
+
+__all__ = ['ADBError', 'ADBRootError', 'ADBTimeoutError', 'ADBProcess', 'ADBCommand', 'ADBHost',
+ 'ADBDevice', 'ADBAndroid', 'ADBB2G', 'DeviceManager', 'DMError', 'ZeroconfListener',
+ 'DeviceManagerADB', 'DeviceManagerSUT', 'DroidADB', 'DroidSUT', 'DroidConnectByHWID']
diff --git a/testing/mozbase/mozdevice/mozdevice/adb.py b/testing/mozbase/mozdevice/mozdevice/adb.py
new file mode 100644
index 000000000..5958937d9
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/adb.py
@@ -0,0 +1,2271 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+import re
+import shutil
+import subprocess
+import tempfile
+import time
+import traceback
+
+from abc import ABCMeta, abstractmethod
+from distutils import dir_util
+
+
+class ADBProcess(object):
+ """ADBProcess encapsulates the data related to executing the adb process."""
+
+ def __init__(self, args):
+        #: command argument list.
+ self.args = args
+ #: Temporary file handle to be used for stdout.
+ self.stdout_file = tempfile.TemporaryFile()
+ #: boolean indicating if the command timed out.
+ self.timedout = None
+ #: exitcode of the process.
+ self.exitcode = None
+ #: subprocess Process object used to execute the command.
+ self.proc = subprocess.Popen(args,
+ stdout=self.stdout_file,
+ stderr=subprocess.STDOUT)
+
+ @property
+ def stdout(self):
+ """Return the contents of stdout."""
+ if not self.stdout_file or self.stdout_file.closed:
+ content = ""
+ else:
+ self.stdout_file.seek(0, os.SEEK_SET)
+ content = self.stdout_file.read().rstrip()
+ return content
+
+ def __str__(self):
+ return ('args: %s, exitcode: %s, stdout: %s' % (
+ ' '.join(self.args), self.exitcode, self.stdout))
+
+# ADBError, ADBRootError, and ADBTimeoutError are treated
+# differently in order that unhandled ADBRootErrors and
+# ADBTimeoutErrors can be handled distinctly from ADBErrors.
+
+
+class ADBError(Exception):
+ """ADBError is raised in situations where a command executed on a
+ device either exited with a non-zero exitcode or when an
+ unexpected error condition has occurred. Generally, ADBErrors can
+ be handled and the device can continue to be used.
+ """
+ pass
+
+
+class ADBListDevicesError(ADBError):
+ """ADBListDevicesError is raised when errors are found listing the
+ devices, typically not any permissions.
+
+ The devices information is stocked with the *devices* member.
+ """
+
+ def __init__(self, msg, devices):
+ ADBError.__init__(self, msg)
+ self.devices = devices
+
+
+class ADBRootError(Exception):
+ """ADBRootError is raised when a shell command is to be executed as
+ root but the device does not support it. This error is fatal since
+ there is no recovery possible by the script. You must either root
+ your device or change your scripts to not require running as root.
+ """
+ pass
+
+
+class ADBTimeoutError(Exception):
+ """ADBTimeoutError is raised when either a host command or shell
+ command takes longer than the specified timeout to execute. The
+ timeout value is set in the ADBCommand constructor and is 300 seconds by
+ default. This error is typically fatal since the host is having
+ problems communicating with the device. You may be able to recover
+ by rebooting, but this is not guaranteed.
+
+ Recovery options are:
+
+ * Killing and restarting the adb server via
+ ::
+
+ adb kill-server; adb start-server
+
+ * Rebooting the device manually.
+ * Rebooting the host.
+ """
+ pass
+
+
+class ADBCommand(object):
+ """ADBCommand provides a basic interface to adb commands
+ which is used to provide the 'command' methods for the
+ classes ADBHost and ADBDevice.
+
+ ADBCommand should only be used as the base class for other
+ classes and should not be instantiated directly. To enforce this
+    restriction, calling ADBCommand's constructor will raise a
+    NotImplementedError exception.
+
+ ::
+
+ from mozdevice import ADBCommand
+
+ try:
+ adbcommand = ADBCommand()
+ except NotImplementedError:
+ print "ADBCommand can not be instantiated."
+ """
+
+ def __init__(self,
+ adb='adb',
+ adb_host=None,
+ adb_port=None,
+ logger_name='adb',
+ timeout=300,
+ verbose=False):
+ """Initializes the ADBCommand object.
+
+ :param str adb: path to adb executable. Defaults to 'adb'.
+ :param adb_host: host of the adb server.
+ :type adb_host: str or None
+ :param adb_port: port of the adb server.
+ :type adb_port: integer or None
+ :param str logger_name: logging logger name. Defaults to 'adb'.
+
+ :raises: * ADBError
+ * ADBTimeoutError
+ """
+ if self.__class__ == ADBCommand:
+ raise NotImplementedError
+
+ self._logger = self._get_logger(logger_name)
+ self._verbose = verbose
+ self._adb_path = adb
+ self._adb_host = adb_host
+ self._adb_port = adb_port
+ self._timeout = timeout
+ self._polling_interval = 0.1
+ self._adb_version = ''
+
+ self._logger.debug("%s: %s" % (self.__class__.__name__,
+ self.__dict__))
+
+ # catch early a missing or non executable adb command
+ # and get the adb version while we are at it.
+ try:
+ output = subprocess.Popen([adb, 'version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()
+ re_version = re.compile(r'Android Debug Bridge version (.*)')
+ self._adb_version = re_version.match(output[0]).group(1)
+ except Exception as exc:
+ raise ADBError('%s: %s is not executable.' % (exc, adb))
+
+ def _get_logger(self, logger_name):
+ logger = None
+ try:
+ import mozlog
+ logger = mozlog.get_default_logger(logger_name)
+ except ImportError:
+ pass
+
+ if logger is None:
+ import logging
+ logger = logging.getLogger(logger_name)
+ return logger
+
+ # Host Command methods
+
+ def command(self, cmds, device_serial=None, timeout=None):
+ """Executes an adb command on the host.
+
+ :param list cmds: The command and its arguments to be
+ executed.
+ :param device_serial: The device's
+ serial number if the adb command is to be executed against
+ a specific device.
+ :type device_serial: str or None
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBCommand constructor is used.
+ :type timeout: integer or None
+ :returns: :class:`mozdevice.ADBProcess`
+
+ command() provides a low level interface for executing
+ commands on the host via adb.
+
+ command() executes on the host in such a fashion that stdout
+ of the adb process is a file handle on the host and
+ the exit code is available as the exit code of the adb
+ process.
+
+ The caller provides a list containing commands, as well as a
+ timeout period in seconds.
+
+ A subprocess is spawned to execute adb with stdout and stderr
+ directed to a temporary file. If the process takes longer than
+ the specified timeout, the process is terminated.
+
+        It is the caller's responsibility to clean up by closing
+ the stdout temporary file.
+ """
+ args = [self._adb_path]
+ if self._adb_host:
+ args.extend(['-H', self._adb_host])
+ if self._adb_port:
+ args.extend(['-P', str(self._adb_port)])
+ if device_serial:
+ args.extend(['-s', device_serial, 'wait-for-device'])
+ args.extend(cmds)
+
+ adb_process = ADBProcess(args)
+
+ if timeout is None:
+ timeout = self._timeout
+
+ start_time = time.time()
+ adb_process.exitcode = adb_process.proc.poll()
+ while ((time.time() - start_time) <= timeout and
+ adb_process.exitcode is None):
+ time.sleep(self._polling_interval)
+ adb_process.exitcode = adb_process.proc.poll()
+ if adb_process.exitcode is None:
+ adb_process.proc.kill()
+ adb_process.timedout = True
+ adb_process.exitcode = adb_process.proc.poll()
+
+ adb_process.stdout_file.seek(0, os.SEEK_SET)
+
+ return adb_process
+
+ def command_output(self, cmds, device_serial=None, timeout=None):
+ """Executes an adb command on the host returning stdout.
+
+ :param list cmds: The command and its arguments to be
+ executed.
+ :param device_serial: The device's
+ serial number if the adb command is to be executed against
+ a specific device.
+ :type device_serial: str or None
+ :param timeout: The maximum time in seconds
+ for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBCommand constructor is used.
+ :type timeout: integer or None
+ :returns: string - content of stdout.
+
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ adb_process = None
+ try:
+ # Need to force the use of the ADBCommand class's command
+ # since ADBDevice will redefine command and call its
+ # own version otherwise.
+ adb_process = ADBCommand.command(self, cmds,
+ device_serial=device_serial,
+ timeout=timeout)
+ if adb_process.timedout:
+ raise ADBTimeoutError("%s" % adb_process)
+ elif adb_process.exitcode:
+ raise ADBError("%s" % adb_process)
+ output = adb_process.stdout_file.read().rstrip()
+ if self._verbose:
+ self._logger.debug('command_output: %s, '
+ 'timeout: %s, '
+ 'timedout: %s, '
+ 'exitcode: %s, output: %s' %
+ (' '.join(adb_process.args),
+ timeout,
+ adb_process.timedout,
+ adb_process.exitcode,
+ output))
+
+ return output
+ finally:
+ if adb_process and isinstance(adb_process.stdout_file, file):
+ adb_process.stdout_file.close()
+
+
+class ADBHost(ADBCommand):
+ """ADBHost provides a basic interface to adb host commands
+ which do not target a specific device.
+
+ ::
+
+ from mozdevice import ADBHost
+
+ adbhost = ADBHost()
+ adbhost.start_server()
+ """
+
+ def __init__(self,
+ adb='adb',
+ adb_host=None,
+ adb_port=None,
+ logger_name='adb',
+ timeout=300,
+ verbose=False):
+ """Initializes the ADBHost object.
+
+ :param str adb: path to adb executable. Defaults to 'adb'.
+ :param adb_host: host of the adb server.
+ :type adb_host: str or None
+ :param adb_port: port of the adb server.
+ :type adb_port: integer or None
+ :param str logger_name: logging logger name. Defaults to 'adb'.
+
+ :raises: * ADBError
+ * ADBTimeoutError
+ """
+ ADBCommand.__init__(self, adb=adb, adb_host=adb_host,
+ adb_port=adb_port, logger_name=logger_name,
+ timeout=timeout, verbose=verbose)
+
+ def command(self, cmds, timeout=None):
+ """Executes an adb command on the host.
+
+ :param list cmds: The command and its arguments to be
+ executed.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBHost constructor is used.
+ :type timeout: integer or None
+ :returns: :class:`mozdevice.ADBProcess`
+
+ command() provides a low level interface for executing
+ commands on the host via adb.
+
+ command() executes on the host in such a fashion that stdout
+ of the adb process is a file handle on the host and
+ the exit code is available as the exit code of the adb
+ process.
+
+ The caller provides a list containing commands, as well as a
+ timeout period in seconds.
+
+ A subprocess is spawned to execute adb with stdout and stderr
+ directed to a temporary file. If the process takes longer than
+ the specified timeout, the process is terminated.
+
+        It is the caller's responsibility to clean up by closing
+ the stdout temporary file.
+ """
+ return ADBCommand.command(self, cmds, timeout=timeout)
+
+ def command_output(self, cmds, timeout=None):
+ """Executes an adb command on the host returning stdout.
+
+ :param list cmds: The command and its arguments to be
+ executed.
+ :param timeout: The maximum time in seconds
+ for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBHost constructor is used.
+ :type timeout: integer or None
+ :returns: string - content of stdout.
+
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ return ADBCommand.command_output(self, cmds, timeout=timeout)
+
+ def start_server(self, timeout=None):
+ """Starts the adb server.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBHost constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+
+ Attempting to use start_server with any adb_host value other than None
+ will fail with an ADBError exception.
+
+ You will need to start the server on the remote host via the command:
+
+ .. code-block:: shell
+
+ adb -a fork-server server
+
+ If you wish the remote adb server to restart automatically, you can
+ enclose the command in a loop as in:
+
+ .. code-block:: shell
+
+ while true; do
+ adb -a fork-server server
+ done
+ """
+ self.command_output(["start-server"], timeout=timeout)
+
+ def kill_server(self, timeout=None):
+ """Kills the adb server.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBHost constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ self.command_output(["kill-server"], timeout=timeout)
+
+ def devices(self, timeout=None):
+ """Executes adb devices -l and returns a list of objects describing attached devices.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBHost constructor is used.
+ :type timeout: integer or None
+        :returns: list of dictionaries - one per attached device.
+ :raises: * ADBTimeoutError
+ * ADBListDevicesError
+ * ADBError
+
+ The output of adb devices -l ::
+
+ $ adb devices -l
+ List of devices attached
+ b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw
+
+ is parsed and placed into an object as in
+
+ [{'device_serial': 'b313b945', 'state': 'device', 'product': 'd2vzw',
+ 'usb': '1-7', 'device': 'd2vzw', 'model': 'SCH_I535' }]
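+
+        A sketch of typical use (assumes at least one device is attached)::
+
+            adbhost = ADBHost()
+            for device in adbhost.devices():
+                print device['device_serial'], device['state']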
+ """
+ # b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw
+ # from Android system/core/adb/transport.c statename()
+ re_device_info = re.compile(
+ r"([^\s]+)\s+(offline|bootloader|device|host|recovery|sideload|"
+ "no permissions|unauthorized|unknown)")
+ devices = []
+ lines = self.command_output(["devices", "-l"], timeout=timeout).split('\n')
+ for line in lines:
+ if line == 'List of devices attached ':
+ continue
+ match = re_device_info.match(line)
+ if match:
+ device = {
+ 'device_serial': match.group(1),
+ 'state': match.group(2)
+ }
+ remainder = line[match.end(2):].strip()
+ if remainder:
+ try:
+ device.update(dict([j.split(':')
+ for j in remainder.split(' ')]))
+ except ValueError:
+ self._logger.warning('devices: Unable to parse '
+ 'remainder for device %s' % line)
+ devices.append(device)
+ for device in devices:
+ if device['state'] == 'no permissions':
+ raise ADBListDevicesError(
+ "No permissions to detect devices. You should restart the"
+ " adb server as root:\n"
+ "\n# adb kill-server\n# adb start-server\n"
+ "\nor maybe configure your udev rules.",
+ devices)
+ return devices
+
+
+class ADBDevice(ADBCommand):
+ """ADBDevice is an abstract base class which provides methods which
+ can be used to interact with the associated Android or B2G based
+ device. It must be used via one of the concrete implementations in
+ :class:`ADBAndroid` or :class:`ADBB2G`.
+ """
+ __metaclass__ = ABCMeta
+
+ def __init__(self,
+ device=None,
+ adb='adb',
+ adb_host=None,
+ adb_port=None,
+ test_root='',
+ logger_name='adb',
+ timeout=300,
+ verbose=False,
+ device_ready_retry_wait=20,
+ device_ready_retry_attempts=3):
+ """Initializes the ADBDevice object.
+
+ :param device: When a string is passed, it is interpreted as the
+ device serial number. This form is not compatible with
+ devices containing a ":" in the serial; in this case
+ ValueError will be raised.
+ When a dictionary is passed it must have one or both of
+ the keys "device_serial" and "usb". This is compatible
+ with the dictionaries in the list returned by
+ ADBHost.devices(). If the value of device_serial is a
+ valid serial not containing a ":" it will be used to
+ identify the device, otherwise the value of the usb key,
+ prefixed with "usb:" is used.
+ If None is passed and there is exactly one device attached
+ to the host, that device is used. If there is more than one
+ device attached, ValueError is raised. If no device is
+ attached the constructor will block until a device is
+ attached or the timeout is reached.
+ :type device: dict, str or None
+ :param adb_host: host of the adb server to connect to.
+ :type adb_host: str or None
+ :param adb_port: port of the adb server to connect to.
+ :type adb_port: integer or None
+ :param str logger_name: logging logger name. Defaults to 'adb'.
+ :param integer device_ready_retry_wait: number of seconds to wait
+ between attempts to check if the device is ready after a
+ reboot.
+ :param integer device_ready_retry_attempts: number of attempts when
+ checking if a device is ready.
+
+ :raises: * ADBError
+ * ADBTimeoutError
+ * ValueError
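+
+        A sketch of the accepted forms of the device argument (the
+        serial and usb values shown are hypothetical)::
+
+            ADBAndroid(device='b313b945')
+            ADBAndroid(device={'device_serial': 'b313b945'})
+            ADBAndroid(device={'usb': '1-7'})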
+ """
+ ADBCommand.__init__(self, adb=adb, adb_host=adb_host,
+ adb_port=adb_port, logger_name=logger_name,
+ timeout=timeout, verbose=verbose)
+ self._device_serial = self._get_device_serial(device)
+ self._initial_test_root = test_root
+ self._test_root = None
+ self._device_ready_retry_wait = device_ready_retry_wait
+ self._device_ready_retry_attempts = device_ready_retry_attempts
+ self._have_root_shell = False
+ self._have_su = False
+ self._have_android_su = False
+
+ # Catch exceptions due to the potential for segfaults
+ # calling su when using an improperly rooted device.
+
+ # Note this check to see if adbd is running is performed on
+ # the device in the state it exists in when the ADBDevice is
+ # initialized. It may be the case that it has been manipulated
+ # since its last boot and that its current state does not
+ # match the state the device will have immediately after a
+ # reboot. For example, if adb root was called manually prior
+ # to ADBDevice being initialized, then self._have_root_shell
+ # will not reflect the state of the device after it has been
+ # rebooted again. Therefore this check will need to be
+ # performed again after a reboot.
+
+ self._check_adb_root(timeout=timeout)
+
+ uid = 'uid=0'
+ # Do we have a 'Superuser' sh like su?
+ try:
+ if self.shell_output("su -c id", timeout=timeout).find(uid) != -1:
+ self._have_su = True
+ self._logger.info("su -c supported")
+ except ADBError:
+ self._logger.debug("Check for su -c failed")
+
+ # Do we have Android's su?
+ try:
+ if self.shell_output("su 0 id", timeout=timeout).find(uid) != -1:
+ self._have_android_su = True
+ self._logger.info("su 0 supported")
+ except ADBError:
+ self._logger.debug("Check for su 0 failed")
+
+ self._mkdir_p = None
+ # Force the use of /system/bin/ls or /system/xbin/ls in case
+ # there is /sbin/ls which embeds ansi escape codes to colorize
+ # the output. Detect if we are using busybox ls. We want each
+ # entry on a single line and we don't want . or ..
+ if self.shell_bool("/system/bin/ls /data/local/tmp", timeout=timeout):
+ self._ls = "/system/bin/ls"
+ elif self.shell_bool("/system/xbin/ls /data/local/tmp", timeout=timeout):
+ self._ls = "/system/xbin/ls"
+ else:
+ raise ADBError("ADBDevice.__init__: ls not found")
+ try:
+ self.shell_output("%s -1A /data/local/tmp" % self._ls, timeout=timeout)
+ self._ls += " -1A"
+ except ADBError:
+ self._ls += " -a"
+
+ self._logger.info("%s supported" % self._ls)
+
+ # Do we have cp?
+ self._have_cp = self.shell_bool("type cp", timeout=timeout)
+ self._logger.info("Native cp support: %s" % self._have_cp)
+
+ # Do we have chmod -R?
+ try:
+ self._chmod_R = False
+ re_recurse = re.compile(r'[-]R')
+ chmod_output = self.shell_output("chmod --help", timeout=timeout)
+ match = re_recurse.search(chmod_output)
+ if match:
+ self._chmod_R = True
+ except (ADBError, ADBTimeoutError) as e:
+ self._logger.debug('Check chmod -R: %s' % e)
+ match = re_recurse.search(e.message)
+ if match:
+ self._chmod_R = True
+ self._logger.info("Native chmod -R support: %s" % self._chmod_R)
+
+ self._logger.debug("ADBDevice: %s" % self.__dict__)
+
+ def _get_device_serial(self, device):
+ if device is None:
+ devices = ADBHost(adb=self._adb_path, adb_host=self._adb_host,
+ adb_port=self._adb_port).devices()
+ if len(devices) > 1:
+ raise ValueError("ADBDevice called with multiple devices "
+ "attached and no device specified")
+ elif len(devices) == 0:
+ # We could error here, but this way we'll wait-for-device before we next
+ # run a command, which seems more friendly
+ return
+ device = devices[0]
+
+ def is_valid_serial(serial):
+ return ":" not in serial or serial.startswith("usb:")
+
+ if isinstance(device, (str, unicode)):
+ # Treat this as a device serial
+ if not is_valid_serial(device):
+ raise ValueError("Device serials containing ':' characters are "
+ "invalid. Pass the output from "
+ "ADBHost.devices() for the device instead")
+ return device
+
+ serial = device.get("device_serial")
+ if serial is not None and is_valid_serial(serial):
+ return serial
+ usb = device.get("usb")
+ if usb is not None:
+ return "usb:%s" % usb
+
+ raise ValueError("Unable to get device serial")
+
+ def _check_adb_root(self, timeout=None):
+ self._have_root_shell = False
+ uid = 'uid=0'
+ # Is shell already running as root?
+ try:
+ if self.shell_output("id", timeout=timeout).find(uid) != -1:
+ self._have_root_shell = True
+ self._logger.info("adbd running as root")
+ except ADBError:
+ self._logger.debug("Check for root shell failed")
+
+ # Do we need to run adb root to get a root shell?
+ try:
+ if (not self._have_root_shell and self.command_output(
+ ["root"],
+ timeout=timeout).find("cannot run as root") == -1):
+ self._have_root_shell = True
+ self._logger.info("adbd restarted as root")
+ except ADBError:
+ self._logger.debug("Check for root adbd failed")
+
+ @staticmethod
+ def _escape_command_line(cmd):
+ """Utility function to return escaped and quoted version of command
+ line.
+ """
+ quoted_cmd = []
+
+ for arg in cmd:
+ arg.replace('&', r'\&')
+
+ needs_quoting = False
+ for char in [' ', '(', ')', '"', '&']:
+ if arg.find(char) >= 0:
+ needs_quoting = True
+ break
+ if needs_quoting:
+ arg = "'%s'" % arg
+
+ quoted_cmd.append(arg)
+
+ return " ".join(quoted_cmd)
+
+ @staticmethod
+ def _get_exitcode(file_obj):
+ """Get the exitcode from the last line of the file_obj for shell
+ commands.
+ """
+ file_obj.seek(0, os.SEEK_END)
+
+ line = ''
+ length = file_obj.tell()
+ offset = 1
+ while length - offset >= 0:
+ file_obj.seek(-offset, os.SEEK_END)
+ char = file_obj.read(1)
+ if not char:
+ break
+ if char != '\r' and char != '\n':
+ line = char + line
+ elif line:
+ # we have collected everything up to the beginning of the line
+ break
+ offset += 1
+
+ match = re.match(r'rc=([0-9]+)', line)
+ if match:
+ exitcode = int(match.group(1))
+ file_obj.seek(-1, os.SEEK_CUR)
+ file_obj.truncate()
+ else:
+ exitcode = None
+
+ return exitcode
+
+ @property
+ def test_root(self):
+ """
+ The test_root property returns the directory on the device where
+ temporary test files are stored.
+
+ The first time test_root it is called it determines and caches a value
+ for the test root on the device. It determines the appropriate test
+ root by attempting to create a 'dummy' directory on each of a list of
+ directories and returning the first successful directory as the
+ test_root value.
+
+ The default list of directories checked by test_root are:
+
+ - /storage/sdcard0/tests
+ - /storage/sdcard1/tests
+ - /sdcard/tests
+ - /mnt/sdcard/tests
+ - /data/local/tests
+
+ You may override the default list by providing a test_root argument to
+ the :class:`ADBDevice` constructor which will then be used when
+ attempting to create the 'dummy' directory.
+
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
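+
+        A sketch of overriding the default list (the path and the use of
+        ADBAndroid are hypothetical)::
+
+            device = ADBAndroid(test_root='/data/local/tmp/tests')
+            print device.test_root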
+ """
+ if self._test_root is not None:
+ return self._test_root
+
+ if self._initial_test_root:
+ paths = [self._initial_test_root]
+ else:
+ paths = ['/storage/sdcard0/tests',
+ '/storage/sdcard1/tests',
+ '/sdcard/tests',
+ '/mnt/sdcard/tests',
+ '/data/local/tests']
+
+ max_attempts = 3
+ for attempt in range(1, max_attempts + 1):
+ for test_root in paths:
+ self._logger.debug("Setting test root to %s attempt %d of %d" %
+ (test_root, attempt, max_attempts))
+
+ if self._try_test_root(test_root):
+ self._test_root = test_root
+ return self._test_root
+
+ self._logger.debug('_setup_test_root: '
+ 'Attempt %d of %d failed to set test_root to %s' %
+ (attempt, max_attempts, test_root))
+
+ if attempt != max_attempts:
+ time.sleep(20)
+
+ raise ADBError("Unable to set up test root using paths: [%s]"
+ % ", ".join(paths))
+
+ def _try_test_root(self, test_root):
+ base_path, sub_path = posixpath.split(test_root)
+ if not self.is_dir(base_path):
+ return False
+
+ try:
+ dummy_dir = posixpath.join(test_root, 'dummy')
+ if self.is_dir(dummy_dir):
+ self.rm(dummy_dir, recursive=True)
+ self.mkdir(dummy_dir, parents=True)
+ except ADBError:
+ self._logger.debug("%s is not writable" % test_root)
+ return False
+
+ return True
+
+ # Host Command methods
+
+ def command(self, cmds, timeout=None):
+ """Executes an adb command on the host against the device.
+
+ :param list cmds: The command and its arguments to be
+ executed.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: :class:`mozdevice.ADBProcess`
+
+ command() provides a low level interface for executing
+ commands for a specific device on the host via adb.
+
+ command() executes on the host in such a fashion that stdout
+ of the adb process are file handles on the host and
+ the exit code is available as the exit code of the adb
+ process.
+
+ For executing shell commands on the device, use
+ ADBDevice.shell(). The caller provides a list containing
+ commands, as well as a timeout period in seconds.
+
+ A subprocess is spawned to execute adb for the device with
+ stdout and stderr directed to a temporary file. If the process
+ takes longer than the specified timeout, the process is
+ terminated.
+
+        It is the caller's responsibility to clean up by closing
+ the stdout temporary file.
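+
+        A sketch of direct use, assuming ``device`` is a concrete
+        :class:`ADBAndroid` or :class:`ADBB2G` instance::
+
+            proc = device.command(["get-state"])
+            try:
+                print proc.stdout
+            finally:
+                proc.stdout_file.close()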
+ """
+
+ return ADBCommand.command(self, cmds,
+ device_serial=self._device_serial,
+ timeout=timeout)
+
+ def command_output(self, cmds, timeout=None):
+ """Executes an adb command on the host against the device returning
+ stdout.
+
+ :param list cmds: The command and its arguments to be executed.
+ :param timeout: The maximum time in seconds
+ for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: string - content of stdout.
+
+ :raises: * ADBTimeoutError
+ * ADBError
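+
+        A brief sketch, assuming ``device`` is a concrete
+        :class:`ADBAndroid` or :class:`ADBB2G` instance::
+
+            state = device.command_output(["get-state"])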
+ """
+ return ADBCommand.command_output(self, cmds,
+ device_serial=self._device_serial,
+ timeout=timeout)
+
+ # Port forwarding methods
+
+ def _validate_port(self, port, is_local=True):
+ """Validate a port forwarding specifier. Raises ValueError on failure.
+
+ :param str port: The port specifier to validate
+ :param bool is_local: Flag indicating whether the port represents a local port.
+ """
+ prefixes = ["tcp", "localabstract", "localreserved", "localfilesystem", "dev"]
+
+ if not is_local:
+ prefixes += ["jdwp"]
+
+ parts = port.split(":", 1)
+ if len(parts) != 2 or parts[0] not in prefixes:
+ raise ValueError("Invalid forward specifier %s" % port)
+
+ def forward(self, local, remote, allow_rebind=True, timeout=None):
+ """Forward a local port to a specific port on the device.
+
+ Ports are specified in the form:
+ tcp:<port>
+ localabstract:<unix domain socket name>
+ localreserved:<unix domain socket name>
+ localfilesystem:<unix domain socket name>
+ dev:<character device name>
+ jdwp:<process pid> (remote only)
+
+ :param str local: Local port to forward
+ :param str remote: Remote port to which to forward
+ :param bool allow_rebind: Don't error if the local port is already forwarded
+ :param timeout: The maximum time in seconds
+ for any spawned adb process to complete before throwing
+ an ADBTimeoutError. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :raises: * ValueError
+ * ADBTimeoutError
+ * ADBError
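+
+        A sketch forwarding a local TCP port to the same port on the
+        device (the port number is arbitrary)::
+
+            device.forward("tcp:2828", "tcp:2828")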
+ """
+
+ for port, is_local in [(local, True), (remote, False)]:
+ self._validate_port(port, is_local=is_local)
+
+ cmd = ["forward", local, remote]
+ if not allow_rebind:
+ cmd.insert(1, "--no-rebind")
+ self.command_output(cmd, timeout=timeout)
+
+ def list_forwards(self, timeout=None):
+ """Return a list of tuples specifying active forwards
+
+ Return values are of the form (device, local, remote).
+
+ :param timeout: The maximum time in seconds
+ for any spawned adb process to complete before throwing
+ an ADBTimeoutError. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ forwards = self.command_output(["forward", "--list"], timeout=timeout)
+ return [tuple(line.split(" ")) for line in forwards.split("\n") if line.strip()]
+
+ def remove_forwards(self, local=None, timeout=None):
+ """Remove existing port forwards.
+
+ :param local: local port specifier as for ADBDevice.forward. If local
+ is not specified removes all forwards.
+ :type local: str or None
+ :param timeout: The maximum time in seconds
+ for any spawned adb process to complete before throwing
+ an ADBTimeoutError. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :raises: * ValueError
+ * ADBTimeoutError
+ * ADBError
+ """
+ cmd = ["forward"]
+ if local is None:
+ cmd.extend(["--remove-all"])
+ else:
+ self._validate_port(local, is_local=True)
+ cmd.extend(["--remove", local])
+
+ self.command_output(cmd, timeout=timeout)
+
+ # Device Shell methods
+
+ def shell(self, cmd, env=None, cwd=None, timeout=None, root=False):
+ """Executes a shell command on the device.
+
+ :param str cmd: The command to be executed.
+ :param env: Contains the environment variables and
+ their values.
+ :type env: dict or None
+ :param cwd: The directory from which to execute.
+ :type cwd: str or None
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :returns: :class:`mozdevice.ADBProcess`
+ :raises: ADBRootError
+
+ shell() provides a low level interface for executing commands
+ on the device via adb shell.
+
+        shell() executes on the host in such a fashion that stdout
+        contains the stdout and stderr of the host adb process
+ combined with the stdout and stderr of the shell command
+ on the device. The exit code of shell() is the exit code of
+ the adb command if it was non-zero or the extracted exit code
+ from the output of the shell command executed on the
+ device.
+
+ The caller provides a flag indicating if the command is to be
+ executed as root, a string for any requested working
+ directory, a hash defining the environment, a string
+ containing shell commands, as well as a timeout period in
+ seconds.
+
+ The command line to be executed is created to set the current
+ directory, set the required environment variables, optionally
+ execute the command using su and to output the return code of
+ the command to stdout. The command list is created as a
+ command sequence separated by && which will terminate the
+ command sequence on the first command which returns a non-zero
+ exit code.
+
+ A subprocess is spawned to execute adb shell for the device
+ with stdout and stderr directed to a temporary file. If the
+ process takes longer than the specified timeout, the process
+ is terminated. The return code is extracted from the stdout
+ and is then removed from the file.
+
+        It is the caller's responsibility to clean up by closing
+ the stdout temporary files.
+
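+        As a sketch, a call such as::
+
+            device.shell("ls /data/local/tmp", env={"FOO": "1"}, cwd="/data")
+
+        runs roughly the following command line on the device::
+
+            export FOO=1&& cd /data && ls /data/local/tmp; echo rc=$?
+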
+ """
+ if root and not self._have_root_shell:
+ # If root was requested and we do not already have a root
+ # shell, then use the appropriate version of su to invoke
+ # the shell cmd. Prefer Android's su version since it may
+ # falsely report support for su -c.
+ if self._have_android_su:
+ cmd = "su 0 %s" % cmd
+ elif self._have_su:
+ cmd = "su -c \"%s\"" % cmd
+ else:
+ raise ADBRootError('Can not run command %s as root!' % cmd)
+
+ # prepend cwd and env to command if necessary
+ if cwd:
+ cmd = "cd %s && %s" % (cwd, cmd)
+ if env:
+ envstr = '&& '.join(map(lambda x: 'export %s=%s' %
+ (x[0], x[1]), env.iteritems()))
+ cmd = envstr + "&& " + cmd
+ cmd += "; echo rc=$?"
+
+ args = [self._adb_path]
+ if self._adb_host:
+ args.extend(['-H', self._adb_host])
+ if self._adb_port:
+ args.extend(['-P', str(self._adb_port)])
+ if self._device_serial:
+ args.extend(['-s', self._device_serial])
+ args.extend(["wait-for-device", "shell", cmd])
+ adb_process = ADBProcess(args)
+
+ if timeout is None:
+ timeout = self._timeout
+
+ start_time = time.time()
+ exitcode = adb_process.proc.poll()
+ while ((time.time() - start_time) <= timeout) and exitcode is None:
+ time.sleep(self._polling_interval)
+ exitcode = adb_process.proc.poll()
+ if exitcode is None:
+ adb_process.proc.kill()
+ adb_process.timedout = True
+ adb_process.exitcode = adb_process.proc.poll()
+ elif exitcode == 0:
+ adb_process.exitcode = self._get_exitcode(adb_process.stdout_file)
+ else:
+ adb_process.exitcode = exitcode
+
+ adb_process.stdout_file.seek(0, os.SEEK_SET)
+
+ return adb_process
+
+ def shell_bool(self, cmd, env=None, cwd=None, timeout=None, root=False):
+ """Executes a shell command on the device returning True on success
+ and False on failure.
+
+ :param str cmd: The command to be executed.
+ :param env: Contains the environment variables and
+ their values.
+ :type env: dict or None
+ :param cwd: The directory from which to execute.
+ :type cwd: str or None
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :returns: boolean
+
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ """
+ adb_process = None
+ try:
+ adb_process = self.shell(cmd, env=env, cwd=cwd,
+ timeout=timeout, root=root)
+ if adb_process.timedout:
+ raise ADBTimeoutError("%s" % adb_process)
+ return adb_process.exitcode == 0
+ finally:
+ if adb_process:
+ adb_process.stdout_file.close()
+
+ def shell_output(self, cmd, env=None, cwd=None, timeout=None, root=False):
+ """Executes an adb shell on the device returning stdout.
+
+ :param str cmd: The command to be executed.
+ :param env: Contains the environment variables and their values.
+ :type env: dict or None
+ :param cwd: The directory from which to execute.
+ :type cwd: str or None
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per
+ adb call. The total time spent may exceed this
+ value. If it is not specified, the value set
+ in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command
+ should be executed as root.
+ :returns: string - content of stdout.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ adb_process = None
+ try:
+ adb_process = self.shell(cmd, env=env, cwd=cwd,
+ timeout=timeout, root=root)
+ if adb_process.timedout:
+ raise ADBTimeoutError("%s" % adb_process)
+ elif adb_process.exitcode:
+ raise ADBError("%s" % adb_process)
+ output = adb_process.stdout_file.read().rstrip()
+ if self._verbose:
+ self._logger.debug('shell_output: %s, '
+ 'timeout: %s, '
+ 'root: %s, '
+ 'timedout: %s, '
+ 'exitcode: %s, '
+ 'output: %s' %
+ (' '.join(adb_process.args),
+ timeout,
+ root,
+ adb_process.timedout,
+ adb_process.exitcode,
+ output))
+
+ return output
+ finally:
+ if adb_process and isinstance(adb_process.stdout_file, file):
+ adb_process.stdout_file.close()
+
+ # Informational methods
+
+ def _get_logcat_buffer_args(self, buffers):
+ valid_buffers = set(['radio', 'main', 'events'])
+ invalid_buffers = set(buffers).difference(valid_buffers)
+ if invalid_buffers:
+ raise ADBError('Invalid logcat buffers %s not in %s ' % (
+ list(invalid_buffers), list(valid_buffers)))
+ args = []
+ for b in buffers:
+ args.extend(['-b', b])
+ return args
+
+ def clear_logcat(self, timeout=None, buffers=[]):
+ """Clears logcat via adb logcat -c.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per
+ adb call. The total time spent may exceed this
+ value. If it is not specified, the value set
+ in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param list buffers: Log buffers to clear. Valid buffers are
+ "radio", "events", and "main". Defaults to "main".
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ buffers = self._get_logcat_buffer_args(buffers)
+ cmds = ["logcat", "-c"] + buffers
+ self.command_output(cmds, timeout=timeout)
+ self.shell_output("log logcat cleared", timeout=timeout)
+
+ def get_logcat(self,
+ filter_specs=[
+ "dalvikvm:I",
+ "ConnectivityService:S",
+ "WifiMonitor:S",
+ "WifiStateTracker:S",
+ "wpa_supplicant:S",
+ "NetworkStateTracker:S"],
+ format="time",
+ filter_out_regexps=[],
+ timeout=None,
+ buffers=[]):
+ """Returns the contents of the logcat file as a list of strings.
+
+ :param list filter_specs: Optional logcat messages to
+ be included.
+ :param str format: Optional logcat format.
+        :param list filter_out_regexps: Optional logcat messages to be
+ excluded.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param list buffers: Log buffers to retrieve. Valid buffers are
+ "radio", "events", and "main". Defaults to "main".
+ :returns: list of lines logcat output.
+ :raises: * ADBTimeoutError
+ * ADBError
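+
+        A minimal sketch (the filter spec shown is only an example)::
+
+            for line in device.get_logcat(filter_specs=["*:W"]):
+                print line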
+ """
+ buffers = self._get_logcat_buffer_args(buffers)
+ cmds = ["logcat", "-v", format, "-d"] + buffers + filter_specs
+ lines = self.command_output(cmds, timeout=timeout).split('\r')
+
+ for regex in filter_out_regexps:
+ lines = [line for line in lines if not re.search(regex, line)]
+
+ return lines
+
+ def get_prop(self, prop, timeout=None):
+ """Gets value of a property from the device via adb shell getprop.
+
+        :param str prop: The property name.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: string value of property.
+ :raises: * ADBTimeoutError
+ * ADBError
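+
+        A brief sketch::
+
+            model = device.get_prop('ro.product.model')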
+ """
+ output = self.shell_output('getprop %s' % prop, timeout=timeout)
+ return output
+
+ def get_state(self, timeout=None):
+ """Returns the device's state via adb get-state.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: string value of adb get-state.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ output = self.command_output(["get-state"], timeout=timeout).strip()
+ return output
+
+ def get_ip_address(self, interfaces=None, timeout=None):
+ """Returns the device's ip address, or None if it doesn't have one
+
+ :param interfaces: Interfaces to allow, or None to allow any
+ non-loopback interface.
+ :type interfaces: list or None
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: string ip address of the device or None if it could not
+ be found.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ if not interfaces:
+ interfaces = ["wlan0", "eth0"]
+ wifi_interface = self.shell_output('getprop wifi.interface', timeout=timeout)
+ self._logger.debug('get_ip_address: wifi_interface: %s' % wifi_interface)
+ if wifi_interface and wifi_interface not in interfaces:
+            interfaces.append(wifi_interface)
+
+ # ifconfig interface
+ # can return two different formats:
+ # eth0: ip 192.168.1.139 mask 255.255.255.0 flags [up broadcast running multicast]
+ # or
+ # wlan0 Link encap:Ethernet HWaddr 00:9A:CD:B8:39:65
+ # inet addr:192.168.1.38 Bcast:192.168.1.255 Mask:255.255.255.0
+ # inet6 addr: fe80::29a:cdff:feb8:3965/64 Scope: Link
+ # UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1
+ # RX packets:180 errors:0 dropped:0 overruns:0 frame:0
+ # TX packets:218 errors:0 dropped:0 overruns:0 carrier:0
+ # collisions:0 txqueuelen:1000
+ # RX bytes:84577 TX bytes:31202
+
+ re1_ip = re.compile(r'(\w+): ip ([0-9.]+) mask.*')
+ # re1_ip will match output of the first format
+        # with group 1 returning the interface and group 2 returning the ip address.
+
+ # re2_interface will match the interface line in the second format
+ # while re2_ip will match the inet addr line of the second format.
+ re2_interface = re.compile(r'(\w+)\s+Link')
+ re2_ip = re.compile(r'\s+inet addr:([0-9.]+)')
+
+ matched_interface = None
+ matched_ip = None
+ re_bad_addr = re.compile(r'127.0.0.1|0.0.0.0')
+
+ self._logger.debug('get_ip_address: ifconfig')
+ for interface in interfaces:
+ try:
+ output = self.shell_output('ifconfig %s' % interface,
+ timeout=timeout)
+ except ADBError:
+ output = ''
+
+ for line in output.split("\n"):
+ if not matched_interface:
+ match = re1_ip.match(line)
+ if match:
+ matched_interface, matched_ip = match.groups()
+ else:
+ match = re2_interface.match(line)
+ if match:
+ matched_interface = match.group(1)
+ else:
+ match = re2_ip.match(line)
+ if match:
+ matched_ip = match.group(1)
+
+ if matched_ip:
+ if not re_bad_addr.match(matched_ip):
+ self._logger.debug('get_ip_address: found: %s %s' %
+ (matched_interface, matched_ip))
+ return matched_ip
+ matched_interface = None
+ matched_ip = None
+
+ self._logger.debug('get_ip_address: netcfg')
+ # Fall back on netcfg if ifconfig does not work.
+ # $ adb shell netcfg
+ # lo UP 127.0.0.1/8 0x00000049 00:00:00:00:00:00
+ # dummy0 DOWN 0.0.0.0/0 0x00000082 8e:cd:67:48:b7:c2
+ # rmnet0 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet1 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet2 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet3 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet4 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet5 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet6 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # rmnet7 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00
+ # sit0 DOWN 0.0.0.0/0 0x00000080 00:00:00:00:00:00
+ # vip0 DOWN 0.0.0.0/0 0x00001012 00:01:00:00:00:01
+ # wlan0 UP 192.168.1.157/24 0x00001043 38:aa:3c:1c:f6:94
+
+ re3_netcfg = re.compile(r'(\w+)\s+UP\s+([1-9]\d{0,2}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
+ try:
+ output = self.shell_output('netcfg', timeout=timeout)
+ except ADBError:
+ output = ''
+ for line in output.split("\n"):
+ match = re3_netcfg.search(line)
+ if match:
+ matched_interface, matched_ip = match.groups()
+ if matched_interface == "lo" or re_bad_addr.match(matched_ip):
+ matched_interface = None
+ matched_ip = None
+ elif matched_ip and matched_interface in interfaces:
+ self._logger.debug('get_ip_address: found: %s %s' %
+ (matched_interface, matched_ip))
+ return matched_ip
+ self._logger.debug('get_ip_address: not found')
+ return matched_ip
+
+ # File management methods
+
+ def remount(self, timeout=None):
+ """Remount /system/ in read/write mode
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError"""
+
+ rv = self.command_output(["remount"], timeout=timeout)
+ if not rv.startswith("remount succeeded"):
+ raise ADBError("Unable to remount device")
+
+ def chmod(self, path, recursive=False, mask="777", timeout=None, root=False):
+ """Recursively changes the permissions of a directory on the
+ device.
+
+ :param str path: The directory name on the device.
+ :param bool recursive: Flag specifying if the command should be
+ executed recursively.
+ :param str mask: The octal permissions.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before throwing
+ an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ # Note that on some tests such as webappstartup, an error
+ # occurs during recursive calls to chmod where a "No such file
+ # or directory" error will occur for the
+ # /data/data/org.mozilla.fennec/files/mozilla/*.webapp0/lock
+ # which is a symbolic link to a socket: lock ->
+ # 127.0.0.1:+<port>. On Linux, chmod -R ignores symbolic
+        # links but it appears Android's version does not. We ignore
+ # this type of error, but pass on any other errors that are
+ # detected.
+ path = posixpath.normpath(path.strip())
+ self._logger.debug('chmod: path=%s, recursive=%s, mask=%s, root=%s' %
+ (path, recursive, mask, root))
+ if not recursive:
+ self.shell_output("chmod %s %s" % (mask, path),
+ timeout=timeout, root=root)
+ return
+
+ if self._chmod_R:
+ try:
+ self.shell_output("chmod -R %s %s" % (mask, path),
+ timeout=timeout, root=root)
+ except ADBError as e:
+ if e.message.find('No such file or directory') == -1:
+ raise
+ self._logger.warning('chmod -R %s %s: Ignoring Error: %s' %
+ (mask, path, e.message))
+ return
+ # Obtain a list of the directories and files which match path
+        # and construct a shell script which explicitly calls chmod on
+ # each of them.
+ entries = self.ls(path, recursive=recursive, timeout=timeout,
+ root=root)
+ tmpf = None
+ chmodsh = None
+ try:
+ tmpf = tempfile.NamedTemporaryFile(delete=False)
+ for entry in entries:
+ tmpf.write('chmod %s %s\n' % (mask, entry))
+ tmpf.close()
+ chmodsh = '/data/local/tmp/%s' % os.path.basename(tmpf.name)
+ self.push(tmpf.name, chmodsh)
+ self.shell_output('chmod 777 %s' % chmodsh, timeout=timeout,
+ root=root)
+ self.shell_output('sh -c %s' % chmodsh, timeout=timeout,
+ root=root)
+ finally:
+ if tmpf:
+ os.unlink(tmpf.name)
+ if chmodsh:
+ self.rm(chmodsh, timeout=timeout, root=root)
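+
+    # Hypothetical usage sketch: relax permissions on a test directory
+    # (the path is only an example):
+    #
+    #     device.chmod('/data/local/tmp/tests', recursive=True,
+    #                  mask='777', root=True)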
+
+ def exists(self, path, timeout=None, root=False):
+ """Returns True if the path exists on the device.
+
+ :param str path: The directory name on the device.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should be
+ executed as root.
+ :returns: boolean - True if path exists.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ """
+ path = posixpath.normpath(path)
+ return self.shell_bool('ls -a %s' % path, timeout=timeout, root=root)
+
+ def is_dir(self, path, timeout=None, root=False):
+ """Returns True if path is an existing directory on the device.
+
+ :param str path: The path on the device.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :returns: boolean - True if path exists on the device and is a
+ directory.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ """
+ path = posixpath.normpath(path)
+ return self.shell_bool('ls -a %s/' % path, timeout=timeout, root=root)
+
+ def is_file(self, path, timeout=None, root=False):
+ """Returns True if path is an existing file on the device.
+
+ :param str path: The file name on the device.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :returns: boolean - True if path exists on the device and is a
+ file.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ """
+ path = posixpath.normpath(path)
+ return (
+ self.exists(path, timeout=timeout, root=root) and
+ not self.is_dir(path, timeout=timeout, root=root))
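+
+    # Hypothetical usage sketch of the path predicates above (the path is
+    # only an example):
+    #
+    #     if device.exists('/sdcard/tests') and device.is_dir('/sdcard/tests'):
+    #         print 'test directory is present'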
+
+ def list_files(self, path, timeout=None, root=False):
+ """Return a list of files/directories contained in a directory
+ on the device.
+
+ :param str path: The directory name on the device.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :returns: list of files/directories contained in the directory.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ """
+ path = posixpath.normpath(path.strip())
+ data = []
+ if self.is_dir(path, timeout=timeout, root=root):
+ try:
+ data = self.shell_output("%s %s" % (self._ls, path),
+ timeout=timeout,
+ root=root).split('\r\n')
+ self._logger.debug('list_files: data: %s' % data)
+ except ADBError:
+ self._logger.error('Ignoring exception in ADBDevice.list_files\n%s' %
+ traceback.format_exc())
+ data[:] = [item for item in data if item]
+ self._logger.debug('list_files: %s' % data)
+ return data
+
+ def ls(self, path, recursive=False, timeout=None, root=False):
+ """Return a list of matching files/directories on the device.
+
+ The ls method emulates the behavior of the ls shell command.
+        It differs from the list_files method by supporting wildcards,
+        by returning matches even if the path is not a directory, and
+        by allowing a recursive listing.
+
+ ls /sdcard always returns /sdcard and not the contents of the
+ sdcard path. The ls method makes the behavior consistent with
+        other paths by adjusting /sdcard to /sdcard/. Note that this is
+        also the case for other sdcard-related paths such as
+        /storage/emulated/legacy, but no adjustment is made in those
+ cases.
+
+ The ls method works around a Nexus 4 bug which prevents
+ recursive listing of directories on the sdcard unless the path
+ ends with "/*" by adjusting sdcard paths ending in "/" to end
+ with "/*". This adjustment is only made on official Nexus 4
+ builds with property ro.product.model "Nexus 4". Note that
+ this will fail to return any "hidden" files or directories
+ which begin with ".".
+
+ :param str path: The directory name on the device.
+ :param bool recursive: Flag specifying if a recursive listing
+ is to be returned. If recursive is False, the returned
+ matches will be relative to the path. If recursive is True,
+ the returned matches will be absolute paths.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :returns: list of files/directories contained in the directory.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ """
+ path = posixpath.normpath(path.strip())
+ parent = ''
+ entries = {}
+
+ if path == '/sdcard':
+ path += '/'
+
+ # Android 2.3 and later all appear to support ls -R however
+ # Nexus 4 does not perform a recursive search on the sdcard
+ # unless the path is a directory with * wild card.
+ if not recursive:
+ recursive_flag = ''
+ else:
+ recursive_flag = '-R'
+ if path.startswith('/sdcard') and path.endswith('/'):
+ model = self.shell_output('getprop ro.product.model',
+ timeout=timeout,
+ root=root)
+ if model == 'Nexus 4':
+ path += '*'
+ lines = self.shell_output('%s %s %s' % (self._ls, recursive_flag, path),
+ timeout=timeout,
+ root=root).split('\r\n')
+ for line in lines:
+ line = line.strip()
+ if not line:
+ parent = ''
+ continue
+ if line.endswith(':'): # This is a directory
+ parent = line.replace(':', '/')
+ entry = parent
+ # Remove earlier entry which is marked as a file.
+ if parent[:-1] in entries:
+ del entries[parent[:-1]]
+ elif parent:
+ entry = "%s%s" % (parent, line)
+ else:
+ entry = line
+ entries[entry] = 1
+ entry_list = entries.keys()
+ entry_list.sort()
+ return entry_list
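+
+    # Hypothetical usage sketch: a recursive listing returns absolute
+    # paths, a non-recursive one returns matches relative to the path:
+    #
+    #     all_entries = device.ls('/sdcard/tests/', recursive=True)
+    #     top_level = device.ls('/sdcard/tests')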
+
+ def mkdir(self, path, parents=False, timeout=None, root=False):
+ """Create a directory on the device.
+
+ :param str path: The directory name on the device
+ to be created.
+ :param bool parents: Flag indicating if the parent directories are
+ also to be created. Think mkdir -p path.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ path = posixpath.normpath(path)
+ if parents:
+ if self._mkdir_p is None or self._mkdir_p:
+ # Use shell_bool to catch the possible
+ # non-zero exitcode if -p is not supported.
+ if self.shell_bool('mkdir -p %s' % path, timeout=timeout,
+ root=root):
+ self._mkdir_p = True
+ return
+ # mkdir -p is not supported. create the parent
+ # directories individually.
+ if not self.is_dir(posixpath.dirname(path), root=root):
+ parts = path.split('/')
+ name = "/"
+ for part in parts[:-1]:
+ if part != "":
+ name = posixpath.join(name, part)
+ if not self.is_dir(name, root=root):
+ # Use shell_output to allow any non-zero
+ # exitcode to raise an ADBError.
+ self.shell_output('mkdir %s' % name,
+ timeout=timeout, root=root)
+
+ # If parents is True and the directory does exist, we don't
+ # need to do anything. Otherwise we call mkdir. If the
+ # directory already exists or if it is a file instead of a
+ # directory, mkdir will fail and we will raise an ADBError.
+ if not parents or not self.is_dir(path, root=root):
+ self.shell_output('mkdir %s' % path, timeout=timeout, root=root)
+ if not self.is_dir(path, timeout=timeout, root=root):
+ raise ADBError('mkdir %s Failed' % path)
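+
+    # Hypothetical usage sketch: create nested directories in one call,
+    # letting mkdir fall back to per-component creation if -p is
+    # unsupported:
+    #
+    #     device.mkdir('/data/local/tmp/tests/unit', parents=True, root=True)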
+
+ def push(self, local, remote, timeout=None):
+ """Pushes a file or directory to the device.
+
+ :param str local: The name of the local file or
+ directory name.
+ :param str remote: The name of the remote file or
+ directory name.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ # remove trailing /
+ local = os.path.normpath(local)
+ remote = os.path.normpath(remote)
+ copy_required = False
+ if self._adb_version >= '1.0.36' and \
+ os.path.isdir(local) and self.is_dir(remote):
+ # See do_sync_push in
+ # https://android.googlesource.com/platform/system/core/+/master/adb/file_sync_client.cpp
+ # Work around change in behavior in adb 1.0.36 where if
+ # the remote destination directory exists, adb push will
+ # copy the source directory *into* the destination
+ # directory otherwise it will copy the source directory
+ # *onto* the destination directory.
+ #
+ # If the destination directory does exist, push to its
+ # parent directory. If the source and destination leaf
+ # directory names are different, copy the source directory
+ # to a temporary directory with the same leaf name as the
+ # destination so that when we push to the parent, the
+ # source is copied onto the destination directory.
+ local_name = os.path.basename(local)
+ remote_name = os.path.basename(remote)
+ if local_name != remote_name:
+ copy_required = True
+ temp_parent = tempfile.mkdtemp()
+ new_local = os.path.join(temp_parent, remote_name)
+ dir_util.copy_tree(local, new_local)
+ local = new_local
+ remote = '/'.join(remote.rstrip('/').split('/')[:-1])
+ try:
+ self.command_output(["push", local, remote], timeout=timeout)
+ except:
+ raise
+ finally:
+ if copy_required:
+ shutil.rmtree(temp_parent)
+
+ def pull(self, remote, local, timeout=None):
+ """Pulls a file or directory from the device.
+
+ :param str remote: The path of the remote file or
+ directory.
+ :param str local: The path of the local file or
+ directory name.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ # remove trailing /
+ local = os.path.normpath(local)
+ remote = os.path.normpath(remote)
+ copy_required = False
+ original_local = local
+ if self._adb_version >= '1.0.36' and \
+ os.path.isdir(local) and self.is_dir(remote):
+ # See do_sync_pull in
+ # https://android.googlesource.com/platform/system/core/+/master/adb/file_sync_client.cpp
+ # Work around change in behavior in adb 1.0.36 where if
+ # the local destination directory exists, adb pull will
+ # copy the source directory *into* the destination
+ # directory otherwise it will copy the source directory
+ # *onto* the destination directory.
+ #
+ # If the destination directory does exist, pull to its
+ # parent directory. If the source and destination leaf
+ # directory names are different, pull the source directory
+ # into a temporary directory and then copy the temporary
+ # directory onto the destination.
+ local_name = os.path.basename(local)
+ remote_name = os.path.basename(remote)
+ if local_name != remote_name:
+ copy_required = True
+ temp_parent = tempfile.mkdtemp()
+ local = os.path.join(temp_parent, remote_name)
+ else:
+ local = '/'.join(local.rstrip('/').split('/')[:-1])
+ try:
+ self.command_output(["pull", remote, local], timeout=timeout)
+ except:
+ raise
+ finally:
+ if copy_required:
+ dir_util.copy_tree(local, original_local)
+ shutil.rmtree(temp_parent)
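+
+    # Hypothetical usage sketch: round-trip a directory (the local and
+    # remote paths are only examples):
+    #
+    #     device.push('/tmp/profile', '/sdcard/tests/profile')
+    #     device.pull('/sdcard/tests/profile', '/tmp/profile-copy')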
+
+ def rm(self, path, recursive=False, force=False, timeout=None, root=False):
+ """Delete files or directories on the device.
+
+ :param str path: The path of the remote file or directory.
+ :param bool recursive: Flag specifying if the command is
+ to be applied recursively to the target. Default is False.
+ :param bool force: Flag which if True will not raise an
+ error when attempting to delete a non-existent file. Default
+ is False.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ cmd = "rm"
+ if recursive:
+ cmd += " -r"
+ try:
+ self.shell_output("%s %s" % (cmd, path), timeout=timeout, root=root)
+ if self.is_file(path, timeout=timeout, root=root):
+ raise ADBError('rm("%s") failed to remove file.' % path)
+ except ADBError as e:
+ if not force and 'No such file or directory' in e.message:
+ raise
+
+ def rmdir(self, path, timeout=None, root=False):
+ """Delete empty directory on the device.
+
+ :param str path: The directory name on the device.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ self.shell_output("rmdir %s" % path, timeout=timeout, root=root)
+ if self.is_dir(path, timeout=timeout, root=root):
+ raise ADBError('rmdir("%s") failed to remove directory.' % path)
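+
+    # Hypothetical usage sketch: remove a tree without failing if it is
+    # already gone, then remove its now-empty parent:
+    #
+    #     device.rm('/sdcard/tests/profile', recursive=True, force=True)
+    #     device.rmdir('/sdcard/tests')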
+
+ # Process management methods
+
+ def get_process_list(self, timeout=None):
+ """Returns list of tuples (pid, name, user) for running
+ processes on device.
+
+ :param timeout: The maximum time
+ in seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified,
+ the value set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: list of (pid, name, user) tuples for running processes
+ on the device.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ adb_process = None
+ try:
+ adb_process = self.shell("ps", timeout=timeout)
+ if adb_process.timedout:
+ raise ADBTimeoutError("%s" % adb_process)
+ elif adb_process.exitcode:
+ raise ADBError("%s" % adb_process)
+ # first line is the headers
+ header = adb_process.stdout_file.readline()
+ pid_i = -1
+ user_i = -1
+ els = header.split()
+ for i in range(len(els)):
+ item = els[i].lower()
+ if item == 'user':
+ user_i = i
+ elif item == 'pid':
+ pid_i = i
+ if user_i == -1 or pid_i == -1:
+ self._logger.error('get_process_list: %s' % header)
+ raise ADBError('get_process_list: Unknown format: %s: %s' % (
+ header, adb_process))
+ ret = []
+ line = adb_process.stdout_file.readline()
+ while line:
+ els = line.split()
+ try:
+ ret.append([int(els[pid_i]), els[-1], els[user_i]])
+ except ValueError:
+ self._logger.error('get_process_list: %s %s\n%s' % (
+ header, line, traceback.format_exc()))
+ raise ADBError('get_process_list: %s: %s: %s' % (
+ header, line, adb_process))
+ line = adb_process.stdout_file.readline()
+ self._logger.debug('get_process_list: %s' % ret)
+ return ret
+ finally:
+ if adb_process and isinstance(adb_process.stdout_file, file):
+ adb_process.stdout_file.close()
+
+ def kill(self, pids, sig=None, attempts=3, wait=5,
+ timeout=None, root=False):
+ """Kills processes on the device given a list of process ids.
+
+ :param list pids: process ids to be killed.
+ :param sig: signal to be sent to the process.
+ :type sig: integer or None
+ :param integer attempts: number of attempts to try to
+ kill the processes.
+ :param integer wait: number of seconds to wait after each attempt.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ pid_list = [str(pid) for pid in pids]
+ for attempt in range(attempts):
+ args = ["kill"]
+ if sig:
+ args.append("-%d" % sig)
+ args.extend(pid_list)
+ try:
+ self.shell_output(' '.join(args), timeout=timeout, root=root)
+ except ADBError as e:
+ if 'No such process' not in e.message:
+ raise
+ pid_set = set(pid_list)
+ current_pid_set = set([str(proc[0]) for proc in
+ self.get_process_list(timeout=timeout)])
+ pid_list = list(pid_set.intersection(current_pid_set))
+ if not pid_list:
+ break
+ self._logger.debug("Attempt %d of %d to kill processes %s failed" %
+ (attempt + 1, attempts, pid_list))
+ time.sleep(wait)
+
+ if pid_list:
+ raise ADBError('kill: processes %s not killed' % pid_list)
+
+ def pkill(self, appname, sig=None, attempts=3, wait=5,
+ timeout=None, root=False):
+ """Kills a processes on the device matching a name.
+
+ :param str appname: The app name of the process to
+ be killed. Note that only the first 75 characters of the
+ process name are significant.
+ :param sig: optional signal to be sent to the process.
+ :type sig: integer or None
+ :param integer attempts: number of attempts to try to
+ kill the processes.
+ :param integer wait: number of seconds to wait after each attempt.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should
+ be executed as root.
+
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ procs = self.get_process_list(timeout=timeout)
+        # Limit the comparison to the first 75 characters due to a
+        # limitation in process name length in Android.
+ pids = [proc[0] for proc in procs if proc[1] == appname[:75]]
+ if not pids:
+ return
+
+ try:
+ self.kill(pids, sig, attempts=attempts, wait=wait,
+ timeout=timeout, root=root)
+ except ADBError as e:
+ if self.process_exist(appname, timeout=timeout):
+ raise e
+
+ def process_exist(self, process_name, timeout=None):
+ """Returns True if process with name process_name is running on
+ device.
+
+ :param str process_name: The name of the process
+ to check. Note that only the first 75 characters of the
+ process name are significant.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: boolean - True if process exists.
+
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ if not isinstance(process_name, basestring):
+ raise ADBError("Process name %s is not a string" % process_name)
+
+ # Filter out extra spaces.
+ parts = [x for x in process_name.split(' ') if x != '']
+ process_name = ' '.join(parts)
+
+ # Filter out the quoted env string if it exists
+ # ex: '"name=value;name2=value2;etc=..." process args' -> 'process args'
+ parts = process_name.split('"')
+ if len(parts) > 2:
+ process_name = ' '.join(parts[2:]).strip()
+
+ pieces = process_name.split(' ')
+ parts = pieces[0].split('/')
+ app = parts[-1]
+
+ proc_list = self.get_process_list(timeout=timeout)
+ if not proc_list:
+ return False
+
+ for proc in proc_list:
+ proc_name = proc[1].split('/')[-1]
+            # Limit the comparison to the first 75 characters due to a
+            # limitation in process name length in Android.
+ if proc_name == app[:75]:
+ return True
+ return False
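+
+    # Hypothetical usage sketch tying the process methods together (the
+    # package name is only an example):
+    #
+    #     if device.process_exist('org.mozilla.fennec'):
+    #         device.pkill('org.mozilla.fennec', root=True)
+    #     for pid, name, user in device.get_process_list():
+    #         print pid, name, user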
+
+ def cp(self, source, destination, recursive=False, timeout=None,
+ root=False):
+ """Copies a file or directory on the device.
+
+ :param source: string containing the path of the source file or
+ directory.
+ :param destination: string containing the path of the destination file
+ or directory.
+ :param recursive: optional boolean indicating if a recursive copy is to
+ be performed. Required if the source is a directory. Defaults to
+ False. Think cp -R source destination.
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ source = posixpath.normpath(source)
+ destination = posixpath.normpath(destination)
+ if self._have_cp:
+ r = '-R' if recursive else ''
+ self.shell_output('cp %s %s %s' % (r, source, destination),
+ timeout=timeout, root=root)
+ return
+
+ # Emulate cp behavior depending on if source and destination
+ # already exists and whether they are a directory or file.
+ if not self.exists(source, timeout=timeout, root=root):
+ raise ADBError("cp: can't stat '%s': No such file or directory" %
+ source)
+
+ if self.is_file(source, timeout=timeout, root=root):
+ if self.is_dir(destination, timeout=timeout, root=root):
+ # Copy the source file into the destination directory
+ destination = posixpath.join(destination,
+ posixpath.basename(source))
+ self.shell_output('dd if=%s of=%s' % (source, destination),
+ timeout=timeout, root=root)
+ return
+
+ if self.is_file(destination, timeout=timeout, root=root):
+ raise ADBError('cp: %s: Not a directory' % destination)
+
+ if not recursive:
+ raise ADBError("cp: omitting directory '%s'" % source)
+
+ if self.is_dir(destination, timeout=timeout, root=root):
+ # Copy the source directory into the destination directory.
+ destination_dir = posixpath.join(destination,
+ posixpath.basename(source))
+ else:
+ # Copy the contents of the source directory into the
+ # destination directory.
+ destination_dir = destination
+
+ try:
+ # Do not create parent directories since cp does not.
+ self.mkdir(destination_dir, timeout=timeout, root=root)
+ except ADBError as e:
+ if 'File exists' not in e.message:
+ raise
+
+ for i in self.list_files(source, timeout=timeout, root=root):
+ self.cp(posixpath.join(source, i),
+ posixpath.join(destination_dir, i),
+ recursive=recursive,
+ timeout=timeout, root=root)
+
+ def mv(self, source, destination, timeout=None, root=False):
+ """Moves a file or directory on the device.
+
+ :param source: string containing the path of the source file or
+ directory.
+ :param destination: string containing the path of the destination file
+ or directory.
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBRootError
+ * ADBError
+ """
+ source = posixpath.normpath(source)
+ destination = posixpath.normpath(destination)
+ self.shell_output('mv %s %s' % (source, destination), timeout=timeout,
+ root=root)
+
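+    # Hypothetical usage sketch: copy a tree, then rename the copy (the
+    # paths are only examples):
+    #
+    #     device.cp('/sdcard/tests', '/sdcard/tests-backup', recursive=True)
+    #     device.mv('/sdcard/tests-backup', '/sdcard/tests-old')
+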
+ def reboot(self, timeout=None):
+ """Reboots the device.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBError
+
+ reboot() reboots the device, issues an adb wait-for-device in order to
+ wait for the device to complete rebooting, then calls is_device_ready()
+ to determine if the device has completed booting.
+ """
+ self.command_output(["reboot"], timeout=timeout)
+ # command_output automatically inserts a 'wait-for-device'
+ # argument to adb. Issuing an empty command is the same as adb
+ # -s <device> wait-for-device. We don't send an explicit
+ # 'wait-for-device' since that would add duplicate
+ # 'wait-for-device' arguments which is an error in newer
+ # versions of adb.
+ self.command_output([], timeout=timeout)
+ self._check_adb_root(timeout=timeout)
+ return self.is_device_ready(timeout=timeout)
+
+ @abstractmethod
+ def is_device_ready(self, timeout=None):
+ """Abstract class that returns True if the device is ready.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ return
+
+ @abstractmethod
+ def get_battery_percentage(self, timeout=None):
+ """Abstract class that returns the battery charge as a percentage.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :returns: battery charge as a percentage.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ return
+
+ def get_info(self, directive=None, timeout=None):
+ """
+ Returns a dictionary of information strings about the device.
+
+ :param directive: information you want to get. Options are:
+ - `battery` - battery charge as a percentage
+ - `disk` - total, free, available bytes on disk
+ - `id` - unique id of the device
+ - `os` - name of the os
+ - `process` - list of running processes (same as ps)
+ - `systime` - system time of the device
+ - `uptime` - uptime of the device
+
+ If `directive` is `None`, will return all available information
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ directives = ['battery', 'disk', 'id', 'os', 'process', 'systime',
+ 'uptime']
+
+ if directive in directives:
+ directives = [directive]
+
+ info = {}
+ if 'battery' in directives:
+ info['battery'] = self.get_battery_percentage(timeout=timeout)
+ if 'disk' in directives:
+ info['disk'] = self.shell_output('df /data /system /sdcard',
+ timeout=timeout).splitlines()
+ if 'id' in directives:
+ info['id'] = self.command_output(['get-serialno'], timeout=timeout)
+ if 'os' in directives:
+ info['os'] = self.shell_output('getprop ro.build.display.id',
+ timeout=timeout)
+ if 'process' in directives:
+ ps = self.shell_output('ps', timeout=timeout)
+ info['process'] = ps.splitlines()
+ if 'systime' in directives:
+ info['systime'] = self.shell_output('date', timeout=timeout)
+ if 'uptime' in directives:
+ uptime = self.shell_output('uptime', timeout=timeout)
+ if uptime:
+ m = re.match(r'up time: ((\d+) days, )*(\d{2}):(\d{2}):(\d{2})',
+ uptime)
+ if m:
+ uptime = '%d days %d hours %d minutes %d seconds' % tuple(
+ [int(g or 0) for g in m.groups()[1:]])
+ info['uptime'] = uptime
+ return info
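+
+    # Hypothetical usage sketch: fetch a single directive or everything:
+    #
+    #     print device.get_info('battery')['battery']
+    #     all_info = device.get_info()   # battery, disk, id, os, ...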
diff --git a/testing/mozbase/mozdevice/mozdevice/adb_android.py b/testing/mozbase/mozdevice/mozdevice/adb_android.py
new file mode 100644
index 000000000..bf5fffc0e
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/adb_android.py
@@ -0,0 +1,493 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import time
+
+from abc import ABCMeta
+
+import version_codes
+
+from adb import ADBDevice, ADBError
+
+
+class ADBAndroid(ADBDevice):
+ """ADBAndroid implements :class:`ADBDevice` providing Android-specific
+ functionality.
+
+ ::
+
+ from mozdevice import ADBAndroid
+
+ adbdevice = ADBAndroid()
+ print adbdevice.list_files("/mnt/sdcard")
+ if adbdevice.process_exist("org.mozilla.fennec"):
+ print "Fennec is running"
+ """
+ __metaclass__ = ABCMeta
+
+ def __init__(self,
+ device=None,
+ adb='adb',
+ adb_host=None,
+ adb_port=None,
+ test_root='',
+ logger_name='adb',
+ timeout=300,
+ verbose=False,
+ device_ready_retry_wait=20,
+ device_ready_retry_attempts=3):
+ """Initializes the ADBAndroid object.
+
+ :param device: When a string is passed, it is interpreted as the
+ device serial number. This form is not compatible with
+ devices containing a ":" in the serial; in this case
+ ValueError will be raised.
+ When a dictionary is passed it must have one or both of
+ the keys "device_serial" and "usb". This is compatible
+ with the dictionaries in the list returned by
+ ADBHost.devices(). If the value of device_serial is a
+ valid serial not containing a ":" it will be used to
+ identify the device, otherwise the value of the usb key,
+ prefixed with "usb:" is used.
+ If None is passed and there is exactly one device attached
+ to the host, that device is used. If there is more than one
+ device attached, ValueError is raised. If no device is
+ attached the constructor will block until a device is
+ attached or the timeout is reached.
+ :type device: dict, str or None
+ :param adb_host: host of the adb server to connect to.
+ :type adb_host: str or None
+ :param adb_port: port of the adb server to connect to.
+ :type adb_port: integer or None
+ :param str logger_name: logging logger name. Defaults to 'adb'.
+ :param integer device_ready_retry_wait: number of seconds to wait
+ between attempts to check if the device is ready after a
+ reboot.
+ :param integer device_ready_retry_attempts: number of attempts when
+ checking if a device is ready.
+
+ :raises: * ADBError
+ * ADBTimeoutError
+ * ValueError
+ """
+ ADBDevice.__init__(self, device=device, adb=adb,
+ adb_host=adb_host, adb_port=adb_port,
+ test_root=test_root,
+ logger_name=logger_name, timeout=timeout,
+ verbose=verbose,
+ device_ready_retry_wait=device_ready_retry_wait,
+ device_ready_retry_attempts=device_ready_retry_attempts)
+ # https://source.android.com/devices/tech/security/selinux/index.html
+ # setenforce
+ # usage: setenforce [ Enforcing | Permissive | 1 | 0 ]
+ # getenforce returns either Enforcing or Permissive
+
+ try:
+ self.selinux = True
+ if self.shell_output('getenforce', timeout=timeout) != 'Permissive':
+ self._logger.info('Setting SELinux Permissive Mode')
+ self.shell_output("setenforce Permissive", timeout=timeout, root=True)
+ except ADBError:
+ self.selinux = False
+
+ self.version = int(self.shell_output("getprop ro.build.version.sdk",
+ timeout=timeout))
+
+ def reboot(self, timeout=None):
+ """Reboots the device.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBError
+
+ reboot() reboots the device, issues an adb wait-for-device in order to
+ wait for the device to complete rebooting, then calls is_device_ready()
+ to determine if the device has completed booting.
+
+ If the device supports running adbd as root, adbd will be
+ restarted running as root. Then, if the device supports
+ SELinux, setenforce Permissive will be called to change
+ SELinux to permissive. This must be done after adbd is
+ restarted in order for the SELinux Permissive setting to
+ persist.
+
+ """
+ ready = ADBDevice.reboot(self, timeout=timeout)
+ self._check_adb_root(timeout=timeout)
+ return ready
+
+ # Informational methods
+
+ def get_battery_percentage(self, timeout=None):
+ """Returns the battery charge as a percentage.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :type timeout: integer or None
+ :returns: battery charge as a percentage.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ level = None
+ scale = None
+ percentage = 0
+ cmd = "dumpsys battery"
+ re_parameter = re.compile(r'\s+(\w+):\s+(\d+)')
+ lines = self.shell_output(cmd, timeout=timeout).split('\r')
+ for line in lines:
+ match = re_parameter.match(line)
+ if match:
+ parameter = match.group(1)
+ value = match.group(2)
+ if parameter == 'level':
+ level = float(value)
+ elif parameter == 'scale':
+ scale = float(value)
+                if level is not None and scale is not None:
+ percentage = 100.0 * level / scale
+ break
+ return percentage
+
+ # System control methods
+
+ def is_device_ready(self, timeout=None):
+ """Checks if a device is ready for testing.
+
+        This method uses the Android-only package manager to check for
+ readiness.
+
+ :param timeout: The maximum time
+ in seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ # command_output automatically inserts a 'wait-for-device'
+ # argument to adb. Issuing an empty command is the same as adb
+ # -s <device> wait-for-device. We don't send an explicit
+ # 'wait-for-device' since that would add duplicate
+ # 'wait-for-device' arguments which is an error in newer
+ # versions of adb.
+ self.command_output([], timeout=timeout)
+ pm_error_string = "Error: Could not access the Package Manager"
+ pm_list_commands = ["packages", "permission-groups", "permissions",
+ "instrumentation", "features", "libraries"]
+ ready_path = os.path.join(self.test_root, "ready")
+ for attempt in range(self._device_ready_retry_attempts):
+ failure = 'Unknown failure'
+ success = True
+ try:
+ state = self.get_state(timeout=timeout)
+ if state != 'device':
+ failure = "Device state: %s" % state
+ success = False
+ else:
+ if (self.selinux and self.shell_output('getenforce',
+ timeout=timeout) != 'Permissive'):
+ self._logger.info('Setting SELinux Permissive Mode')
+ self.shell_output("setenforce Permissive", timeout=timeout, root=True)
+ if self.is_dir(ready_path, timeout=timeout, root=True):
+ self.rmdir(ready_path, timeout=timeout, root=True)
+ self.mkdir(ready_path, timeout=timeout, root=True)
+ self.rmdir(ready_path, timeout=timeout, root=True)
+ # Invoke the pm list commands to see if it is up and
+ # running.
+ for pm_list_cmd in pm_list_commands:
+ data = self.shell_output("pm list %s" % pm_list_cmd,
+ timeout=timeout)
+ if pm_error_string in data:
+ failure = data
+ success = False
+ break
+ except ADBError as e:
+ success = False
+ failure = e.message
+
+ if not success:
+ self._logger.debug('Attempt %s of %s device not ready: %s' % (
+ attempt + 1, self._device_ready_retry_attempts,
+ failure))
+ time.sleep(self._device_ready_retry_wait)
+
+ return success
+
+ def power_on(self, timeout=None):
+ """Sets the device's power stayon value.
+
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ try:
+ self.shell_output('svc power stayon true',
+ timeout=timeout,
+ root=True)
+ except ADBError as e:
+            # Executing this command via adb shell fails (exitcode 137),
+            # but it succeeds when run interactively. Any other exitcode
+            # is a real error.
+ if 'exitcode: 137' not in e.message:
+ raise
+ self._logger.warning('Unable to set power stayon true: %s' % e)
+
+ # Application management methods
+
+ def install_app(self, apk_path, timeout=None):
+ """Installs an app on the device.
+
+ :param str apk_path: The apk file name to be installed.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ cmd = ["install"]
+ if self.version >= version_codes.M:
+ cmd.append("-g")
+ cmd.append(apk_path)
+ data = self.command_output(cmd, timeout=timeout)
+ if data.find('Success') == -1:
+ raise ADBError("install failed for %s. Got: %s" %
+ (apk_path, data))
+
+ def is_app_installed(self, app_name, timeout=None):
+ """Returns True if an app is installed on the device.
+
+ :param str app_name: The name of the app to be checked.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ pm_error_string = 'Error: Could not access the Package Manager'
+ data = self.shell_output("pm list package %s" % app_name, timeout=timeout)
+ if pm_error_string in data:
+ raise ADBError(pm_error_string)
+ if app_name not in data:
+ return False
+ return True
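+
+    # Hypothetical usage sketch: install an apk only if the package is not
+    # already present (the file name and package are only examples):
+    #
+    #     if not adbdevice.is_app_installed('org.mozilla.fennec'):
+    #         adbdevice.install_app('/tmp/fennec.apk')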
+
+ def launch_application(self, app_name, activity_name, intent, url=None,
+ extras=None, wait=True, fail_if_running=True,
+ timeout=None):
+ """Launches an Android application
+
+ :param str app_name: Name of application (e.g. `com.android.chrome`)
+ :param str activity_name: Name of activity to launch (e.g. `.Main`)
+ :param str intent: Intent to launch application with
+ :param url: URL to open
+ :type url: str or None
+ :param extras: Extra arguments for application.
+ :type extras: dict or None
+ :param bool wait: If True, wait for application to start before
+ returning.
+ :param bool fail_if_running: Raise an exception if instance of
+ application is already running.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+        # If fail_if_running is True, we throw an exception here. Only one
+        # instance of an application can be running at once on Android,
+        # and starting a new instance may not be what we want, depending
+        # on what we are trying to do.
+ if fail_if_running and self.process_exist(app_name, timeout=timeout):
+ raise ADBError("Only one instance of an application may be running "
+ "at once")
+
+ acmd = ["am", "start"] + \
+ ["-W" if wait else '', "-n", "%s/%s" % (app_name, activity_name)]
+
+ if intent:
+ acmd.extend(["-a", intent])
+
+ if extras:
+ for (key, val) in extras.iteritems():
+ if isinstance(val, int):
+ extra_type_param = "--ei"
+ elif isinstance(val, bool):
+ extra_type_param = "--ez"
+ else:
+ extra_type_param = "--es"
+ acmd.extend([extra_type_param, str(key), str(val)])
+
+ if url:
+ acmd.extend(["-d", url])
+
+ cmd = self._escape_command_line(acmd)
+ self.shell_output(cmd, timeout=timeout)
+
+ def launch_fennec(self, app_name, intent="android.intent.action.VIEW",
+ moz_env=None, extra_args=None, url=None, wait=True,
+ fail_if_running=True, timeout=None):
+ """Convenience method to launch Fennec on Android with various
+ debugging arguments
+
+ :param str app_name: Name of fennec application (e.g.
+ `org.mozilla.fennec`)
+ :param str intent: Intent to launch application.
+ :param moz_env: Mozilla specific environment to pass into
+ application.
+ :type moz_env: str or None
+ :param extra_args: Extra arguments to be parsed by fennec.
+ :type extra_args: str or None
+ :param url: URL to open
+ :type url: str or None
+ :param bool wait: If True, wait for application to start before
+ returning.
+ :param bool fail_if_running: Raise an exception if instance of
+ application is already running.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ extras = {}
+
+ if moz_env:
+ # moz_env is expected to be a dictionary of environment variables:
+ # Fennec itself will set them when launched
+ for (env_count, (env_key, env_val)) in enumerate(moz_env.iteritems()):
+ extras["env" + str(env_count)] = env_key + "=" + env_val
+
+ # Additional command line arguments that fennec will read and use (e.g.
+ # with a custom profile)
+ if extra_args:
+ extras['args'] = " ".join(extra_args)
+
+ self.launch_application(app_name, "org.mozilla.gecko.BrowserApp",
+ intent, url=url, extras=extras,
+ wait=wait, fail_if_running=fail_if_running,
+ timeout=timeout)
+
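+    # Hypothetical usage sketch: launch Fennec with an extra command line
+    # argument and environment variable (the values are only examples):
+    #
+    #     adbdevice.launch_fennec('org.mozilla.fennec',
+    #                             moz_env={'MOZ_LOG': 'nsHttp:5'},
+    #                             extra_args=['-profile', '/sdcard/tests/profile'],
+    #                             url='about:blank')
+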
+ def stop_application(self, app_name, timeout=None, root=False):
+ """Stops the specified application
+
+ For Android 3.0+, we use the "am force-stop" to do this, which
+ is reliable and does not require root. For earlier versions of
+ Android, we simply try to manually kill the processes started
+        by the app repeatedly until none remain. This is
+ less reliable and does require root.
+
+ :param str app_name: Name of application (e.g. `com.android.chrome`)
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :param bool root: Flag specifying if the command should be
+ executed as root.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ if self.version >= version_codes.HONEYCOMB:
+ self.shell_output("am force-stop %s" % app_name,
+ timeout=timeout, root=root)
+ else:
+ num_tries = 0
+ max_tries = 5
+ while self.process_exist(app_name, timeout=timeout):
+ if num_tries > max_tries:
+ raise ADBError("Couldn't successfully kill %s after %s "
+ "tries" % (app_name, max_tries))
+ self.pkill(app_name, timeout=timeout, root=root)
+ num_tries += 1
+
+ # sleep for a short duration to make sure there are no
+ # additional processes in the process of being launched
+ # (this is not 100% guaranteed to work since it is inherently
+            # racy, but it's the best we can do)
+ time.sleep(1)
+
+ def uninstall_app(self, app_name, reboot=False, timeout=None):
+ """Uninstalls an app on the device.
+
+ :param str app_name: The name of the app to be
+ uninstalled.
+ :param bool reboot: Flag indicating that the device should
+ be rebooted after the app is uninstalled. No reboot occurs
+ if the app is not installed.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ if self.is_app_installed(app_name, timeout=timeout):
+ data = self.command_output(["uninstall", app_name], timeout=timeout)
+ if data.find('Success') == -1:
+ self._logger.debug('uninstall_app failed: %s' % data)
+ raise ADBError("uninstall failed for %s. Got: %s" % (app_name, data))
+ if reboot:
+ self.reboot(timeout=timeout)
+
+ def update_app(self, apk_path, timeout=None):
+ """Updates an app on the device and reboots.
+
+ :param str apk_path: The apk file name to be
+ updated.
+ :param timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :type timeout: integer or None
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ cmd = ["install", "-r"]
+ if self.version >= version_codes.M:
+ cmd.append("-g")
+ cmd.append(apk_path)
+ output = self.command_output(cmd, timeout=timeout)
+ self.reboot(timeout=timeout)
+ return output
diff --git a/testing/mozbase/mozdevice/mozdevice/adb_b2g.py b/testing/mozbase/mozdevice/mozdevice/adb_b2g.py
new file mode 100644
index 000000000..3280e6172
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/adb_b2g.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import traceback
+
+import mozfile
+
+from adb import ADBDevice, ADBError
+
+
+class ADBB2G(ADBDevice):
+ """ADBB2G implements :class:`ADBDevice` providing B2G-specific
+ functionality.
+
+ ::
+
+ from mozdevice import ADBB2G
+
+ adbdevice = ADBB2G()
+ print adbdevice.list_files("/mnt/sdcard")
+ if adbdevice.process_exist("b2g"):
+ print "B2G is running"
+ """
+
+ def get_battery_percentage(self, timeout=None):
+ """Returns the battery charge as a percentage.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :returns: battery charge as a percentage.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ with mozfile.NamedTemporaryFile() as tf:
+ self.pull('/sys/class/power_supply/battery/capacity', tf.name,
+ timeout=timeout)
+ try:
+ with open(tf.name) as tf2:
+ return tf2.read().splitlines()[0]
+ except Exception as e:
+ raise ADBError(traceback.format_exception_only(
+ type(e), e)[0].strip())
+
+ def get_memory_total(self, timeout=None):
+ """Returns the total memory available with units.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADBDevice constructor is used.
+ :returns: memory total with units.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ meminfo = {}
+ with mozfile.NamedTemporaryFile() as tf:
+ self.pull('/proc/meminfo', tf.name, timeout=timeout)
+ try:
+ with open(tf.name) as tf2:
+ for line in tf2.read().splitlines():
+ key, value = line.split(':')
+ meminfo[key] = value.strip()
+ except Exception as e:
+ raise ADBError(traceback.format_exception_only(
+ type(e), e)[0].strip())
+ return meminfo['MemTotal']
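+
+    # Hypothetical usage sketch (actual values depend on the device):
+    #
+    #     print adbdevice.get_battery_percentage()   # e.g. '93'
+    #     print adbdevice.get_memory_total()         # e.g. '927208 kB'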
+
+ def get_info(self, directive=None, timeout=None):
+ """
+ Returns a dictionary of information strings about the device.
+
+ :param directive: information you want to get. Options are:
+ - `battery` - battery charge as a percentage
+ - `disk` - total, free, available bytes on disk
+ - `id` - unique id of the device
+ - `memtotal` - total memory available on the device
+ - `os` - name of the os
+ - `process` - list of running processes (same as ps)
+ - `systime` - system time of the device
+ - `uptime` - uptime of the device
+
+ If `directive` is `None`, will return all available information
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ info = super(ADBB2G, self).get_info(directive=directive,
+ timeout=timeout)
+
+ directives = ['memtotal']
+ if directive in directives:
+ directives = [directive]
+
+ if 'memtotal' in directives:
+ info['memtotal'] = self.get_memory_total(timeout=timeout)
+ return info
+
+ def is_device_ready(self, timeout=None):
+ """Returns True if the device is ready.
+
+ :param timeout: optional integer specifying the maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError.
+ This timeout is per adb call. The total time spent
+ may exceed this value. If it is not specified, the value
+ set in the ADB constructor is used.
+ :raises: * ADBTimeoutError
+ * ADBError
+ """
+ return self.shell_bool('ls /sbin', timeout=timeout)
diff --git a/testing/mozbase/mozdevice/mozdevice/devicemanager.py b/testing/mozbase/mozdevice/mozdevice/devicemanager.py
new file mode 100644
index 000000000..de87735ef
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/devicemanager.py
@@ -0,0 +1,674 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import mozlog
+import logging
+import os
+import posixpath
+import re
+import struct
+import StringIO
+import zlib
+
+from functools import wraps
+
+
+class DMError(Exception):
+ "generic devicemanager exception."
+
+ def __init__(self, msg='', fatal=False):
+ self.msg = msg
+ self.fatal = fatal
+
+ def __str__(self):
+ return self.msg
+
+
+def abstractmethod(method):
+ line = method.func_code.co_firstlineno
+ filename = method.func_code.co_filename
+
+ @wraps(method)
+ def not_implemented(*args, **kwargs):
+ raise NotImplementedError('Abstract method %s at File "%s", line %s '
+ 'should be implemented by a concrete class' %
+ (repr(method), filename, line))
+ return not_implemented
+
+
+class DeviceManager(object):
+ """
+ Represents a connection to a device. Once an implementation of this class
+ is successfully instantiated, you may do things like list/copy files to
+ the device, launch processes on the device, and install or remove
+ applications from the device.
+
+ Never instantiate this class directly! Instead, instantiate an
+ implementation of it like DeviceManagerADB or DeviceManagerSUT.
+ """
+
+ _logcatNeedsRoot = True
+ default_timeout = 300
+ short_timeout = 30
+
+ def __init__(self, logLevel=None, deviceRoot=None):
+ try:
+ self._logger = mozlog.get_default_logger(component="mozdevice")
+ if not self._logger: # no global structured logger, fall back to reg logging
+ self._logger = mozlog.unstructured.getLogger("mozdevice")
+ if logLevel is not None:
+ self._logger.setLevel(logLevel)
+ except AttributeError:
+ # Structured logging doesn't work on Python 2.6
+ self._logger = None
+ self._logLevel = logLevel
+ self._remoteIsWin = None
+ self._isDeviceRootSetup = False
+ self._deviceRoot = deviceRoot
+
+ def _log(self, data):
+ """
+ This helper function is called by ProcessHandler to log
+ the output produced by processes
+ """
+ self._logger.debug(data)
+
+ @property
+ def remoteIsWin(self):
+ if self._remoteIsWin is None:
+ self._remoteIsWin = self.getInfo("os")["os"][0] == "windows"
+ return self._remoteIsWin
+
+ @property
+ def logLevel(self):
+ return self._logLevel
+
+ @logLevel.setter
+    def logLevel(self, newLogLevel):
+ self._logLevel = newLogLevel
+ self._logger.setLevel(self._logLevel)
+
+ @property
+ def debug(self):
+ self._logger.warning("dm.debug is deprecated. Use logLevel.")
+ levels = {logging.DEBUG: 5, logging.INFO: 3, logging.WARNING: 2,
+ logging.ERROR: 1, logging.CRITICAL: 0}
+ return levels[self.logLevel]
+
+ @debug.setter
+    def debug(self, newDebug):
+ self._logger.warning("dm.debug is deprecated. Use logLevel.")
+ newDebug = 5 if newDebug > 5 else newDebug # truncate >=5 to 5
+ levels = {5: logging.DEBUG, 3: logging.INFO, 2: logging.WARNING,
+ 1: logging.ERROR, 0: logging.CRITICAL}
+ self.logLevel = levels[newDebug]
+
+ @abstractmethod
+ def getInfo(self, directive=None):
+ """
+ Returns a dictionary of information strings about the device.
+
+ :param directive: information you want to get. Options are:
+
+ - `os` - name of the os
+ - `id` - unique id of the device
+ - `uptime` - uptime of the device
+ - `uptimemillis` - uptime of the device in milliseconds
+ (NOT supported on all implementations)
+ - `systime` - system time of the device
+ - `screen` - screen resolution
+ - `memory` - memory stats
+ - `memtotal` - total memory available on the device, for example 927208 kB
+ - `process` - list of running processes (same as ps)
+ - `disk` - total, free, available bytes on disk
+ - `power` - power status (charge, battery temp)
+ - `temperature` - device temperature
+
+ If `directive` is `None`, will return all available information
+ """
+
+ @abstractmethod
+ def getCurrentTime(self):
+ """
+ Returns device time in milliseconds since the epoch.
+ """
+
+ def getIP(self, interfaces=['eth0', 'wlan0']):
+ """
+ Returns the IP of the device, or None if no connection exists.
+ """
+ for interface in interfaces:
+ match = re.match(r"%s: ip (\S+)" % interface,
+ self.shellCheckOutput(['ifconfig', interface],
+ timeout=self.short_timeout))
+ if match:
+ return match.group(1)
+
+ def recordLogcat(self):
+ """
+ Clears the logcat file making it easier to view specific events.
+ """
+ # TODO: spawn this off in a separate thread/process so we can collect all
+ # the logcat information
+
+ # Right now this is just clearing the logcat so we can only see what
+ # happens after this call.
+ self.shellCheckOutput(['/system/bin/logcat', '-c'], root=self._logcatNeedsRoot,
+ timeout=self.short_timeout)
+
+ def getLogcat(self, filterSpecs=["dalvikvm:I", "ConnectivityService:S",
+ "WifiMonitor:S", "WifiStateTracker:S",
+ "wpa_supplicant:S", "NetworkStateTracker:S"],
+ format="time",
+ filterOutRegexps=[]):
+ """
+ Returns the contents of the logcat file as a list of
+ '\n' terminated strings
+ """
+ cmdline = ["/system/bin/logcat", "-v", format, "-d"] + filterSpecs
+ output = self.shellCheckOutput(cmdline,
+ root=self._logcatNeedsRoot,
+ timeout=self.short_timeout)
+ lines = output.replace('\r\n', '\n').splitlines(True)
+
+ for regex in filterOutRegexps:
+ lines = [line for line in lines if not re.search(regex, line)]
+
+ return lines
+
+ def saveScreenshot(self, filename):
+ """
+ Takes a screenshot of what's being displayed on the device. Uses
+ "screencap" on newer (Android 3.0+) devices (and some older ones with
+ the functionality backported). This function also works on B2G.
+
+ Throws an exception on failure. This will always fail on devices
+ without the screencap utility.
+ """
+ screencap = '/system/bin/screencap'
+ if not self.fileExists(screencap):
+ raise DMError("Unable to capture screenshot on device: no screencap utility")
+
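+ # Note (added for clarity): raw screencap output is assumed to start with a
+ # 12-byte header (width, height and pixel format as 32-bit integers),
+ # followed by the pixel data; hence the unpacking of buf[0:4] and buf[4:8]
+ # and the slice buf[12:] below.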
+ # newer versions of screencap can write directly to a png, but some
+ # older versions can't
+ tempScreenshotFile = self.deviceRoot + "/ss-dm.tmp"
+ self.shellCheckOutput(["sh", "-c", "%s > %s" %
+ (screencap, tempScreenshotFile)],
+ root=True)
+ buf = self.pullFile(tempScreenshotFile)
+ width = int(struct.unpack("I", buf[0:4])[0])
+ height = int(struct.unpack("I", buf[4:8])[0])
+ with open(filename, 'w') as pngfile:
+ pngfile.write(self._writePNG(buf[12:], width, height))
+ self.removeFile(tempScreenshotFile)
+
+ @abstractmethod
+ def pushFile(self, localFilename, remoteFilename, retryLimit=1, createDir=True):
+ """
+ Copies localFilename from the host to remoteFilename on the device.
+ """
+
+ @abstractmethod
+ def pushDir(self, localDirname, remoteDirname, retryLimit=1, timeout=None):
+ """
+ Push local directory from host to remote directory on the device.
+ """
+
+ @abstractmethod
+ def pullFile(self, remoteFilename, offset=None, length=None):
+ """
+ Returns contents of remoteFile using the "pull" command.
+
+ :param remoteFilename: Path to file to pull from remote device.
+ :param offset: Offset in bytes from which to begin reading (optional)
+ :param length: Number of bytes to read (optional)
+ """
+
+ @abstractmethod
+ def getFile(self, remoteFilename, localFilename):
+ """
+ Copy file from remote device to local file on host.
+ """
+
+ @abstractmethod
+ def getDirectory(self, remoteDirname, localDirname, checkDir=True):
+ """
+ Copy directory structure from device (remoteDirname) to host (localDirname).
+ """
+
+ @abstractmethod
+ def validateFile(self, remoteFilename, localFilename):
+ """
+ Returns True if a file on the remote device has the same md5 hash as a local one.
+ """
+
+ def validateDir(self, localDirname, remoteDirname):
+ """
+ Returns True if remoteDirname on device is same as localDirname on host.
+ """
+
+ self._logger.info("validating directory: %s to %s" % (localDirname, remoteDirname))
+ for root, dirs, files in os.walk(localDirname):
+ parts = root.split(localDirname)
+ for f in files:
+ remoteRoot = remoteDirname + '/' + parts[1]
+ remoteRoot = remoteRoot.replace('\\', '/') # normalize Windows path separators
+ if (parts[1] == ""):
+ remoteRoot = remoteDirname
+ remoteName = remoteRoot + '/' + f
+ if (self.validateFile(remoteName, os.path.join(root, f)) is not True):
+ return False
+ return True
+
+ @abstractmethod
+ def mkDir(self, remoteDirname):
+ """
+ Creates a single directory on the device file system.
+ """
+
+ def mkDirs(self, filename):
+ """
+ Make directory structure on the device.
+
+ WARNING: does not create last part of the path. For example, if asked to
+ create `/mnt/sdcard/foo/bar/baz`, it will only create `/mnt/sdcard/foo/bar`
+ """
+ filename = posixpath.normpath(filename)
+ containing = posixpath.dirname(filename)
+ if not self.dirExists(containing):
+ parts = filename.split('/')
+ name = "/" if not self.remoteIsWin else parts.pop(0)
+ for part in parts[:-1]:
+ if part != "":
+ name = posixpath.join(name, part)
+ self.mkDir(name) # mkDir will check previous existence
+
+ @abstractmethod
+ def dirExists(self, dirpath):
+ """
+ Returns whether dirpath exists and is a directory on the device file system.
+ """
+
+ @abstractmethod
+ def fileExists(self, filepath):
+ """
+ Return whether filepath exists on the device file system,
+ regardless of file type.
+ """
+
+ @abstractmethod
+ def listFiles(self, rootdir):
+ """
+ Lists files on the device rootdir.
+
+ Returns array of filenames, ['file1', 'file2', ...]
+ """
+
+ @abstractmethod
+ def removeFile(self, filename):
+ """
+ Removes filename from the device.
+ """
+
+ @abstractmethod
+ def removeDir(self, remoteDirname):
+ """
+ Does a recursive delete of directory on the device: rm -Rf remoteDirname.
+ """
+
+ @abstractmethod
+ def moveTree(self, source, destination):
+ """
+ Does a move of the file or directory on the device.
+
+ :param source: Path to the original file or directory
+ :param destination: Path to the destination file or directory
+ """
+
+ @abstractmethod
+ def copyTree(self, source, destination):
+ """
+ Does a copy of the file or directory on the device.
+
+ :param source: Path to the original file or directory
+ :param destination: Path to the destination file or directory
+ """
+
+ @abstractmethod
+ def chmodDir(self, remoteDirname, mask="777"):
+ """
+ Recursively changes file permissions in a directory.
+ """
+
+ @property
+ def deviceRoot(self):
+ """
+ The device root on the device filesystem for putting temporary
+ testing files.
+ """
+ # derive deviceroot value if not set
+ if not self._deviceRoot or not self._isDeviceRootSetup:
+ self._deviceRoot = self._setupDeviceRoot(self._deviceRoot)
+ self._isDeviceRootSetup = True
+
+ return self._deviceRoot
+
+ @abstractmethod
+ def _setupDeviceRoot(self, deviceRoot):
+ """
+ Sets up and returns a device root location that can be written to by tests.
+ """
+
+ def getDeviceRoot(self):
+ """
+ Get the device root on the device filesystem for putting temporary
+ testing files.
+
+ .. deprecated:: 0.38
+ Use the :py:attr:`deviceRoot` property instead.
+ """
+ return self.deviceRoot
+
+ @abstractmethod
+ def getTempDir(self):
+ """
+ Returns a temporary directory we can use on this device, ensuring
+ that it exists.
+ """
+
+ @abstractmethod
+ def shell(self, cmd, outputfile, env=None, cwd=None, timeout=None, root=False):
+ """
+ Executes shell command on device and returns exit code.
+
+ :param cmd: Commandline list to execute
+ :param outputfile: File to store output
+ :param env: Environment to pass to exec command
+ :param cwd: Directory to execute command from
+ :param timeout: specified in seconds, defaults to 'default_timeout'
+ :param root: Specifies whether command requires root privileges
+ """
+
+ def shellCheckOutput(self, cmd, env=None, cwd=None, timeout=None, root=False):
+ """
+ Executes shell command on device and returns output as a string. Raises if
+ the return code is non-zero.
+
+ :param cmd: Commandline list to execute
+ :param env: Environment to pass to exec command
+ :param cwd: Directory to execute command from
+ :param timeout: specified in seconds, defaults to 'default_timeout'
+ :param root: Specifies whether command requires root privileges
+ :raises: DMError
+ """
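+ # Illustrative usage (hypothetical command and timeout):
+ #
+ # uptime = dm.shellCheckOutput(["uptime"], timeout=30)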
+ buf = StringIO.StringIO()
+ retval = self.shell(cmd, buf, env=env, cwd=cwd, timeout=timeout, root=root)
+ output = str(buf.getvalue()[0:-1]).rstrip()
+ buf.close()
+ if retval != 0:
+ raise DMError(
+ "Non-zero return code for command: %s "
+ "(output: '%s', retval: '%s')" % (cmd, output, retval))
+ return output
+
+ @abstractmethod
+ def getProcessList(self):
+ """
+ Returns array of tuples representing running processes on the device.
+
+ Format of tuples is (processId, processName, userId)
+ """
+
+ def processInfo(self, processName):
+ """
+ Returns information on the process with processName.
+ Information on process is in tuple format: (pid, process path, user)
+ If a process with the specified name does not exist this function will return None.
+ """
+ if not isinstance(processName, basestring):
+ raise TypeError("Process name %s is not a string" % processName)
+
+ processInfo = None
+
+ # filter out extra spaces
+ parts = filter(lambda x: x != '', processName.split(' '))
+ processName = ' '.join(parts)
+
+ # filter out the quoted env string if it exists
+ # ex: '"name=value;name2=value2;etc=..." process args' -> 'process args'
+ parts = processName.split('"')
+ if (len(parts) > 2):
+ processName = ' '.join(parts[2:]).strip()
+
+ pieces = processName.split(' ')
+ parts = pieces[0].split('/')
+ app = parts[-1]
+
+ procList = self.getProcessList()
+ if (procList == []):
+ return None
+
+ for proc in procList:
+ procName = proc[1].split('/')[-1]
+ if (procName == app):
+ processInfo = proc
+ break
+ return processInfo
+
+ def processExist(self, processName):
+ """
+ Returns the pid if a process named processName is running on the device, otherwise None.
+ """
+ processInfo = self.processInfo(processName)
+ if processInfo:
+ return processInfo[0]
+
+ @abstractmethod
+ def killProcess(self, processName, sig=None):
+ """
+ Kills the process named processName. If sig is not None, process is
+ killed with the specified signal.
+
+ :param processName: path or name of the process to kill
+ :param sig: signal to pass into the kill command (optional)
+ """
+
+ @abstractmethod
+ def reboot(self, wait=False, ipAddr=None):
+ """
+ Reboots the device.
+
+ :param wait: block on device to come back up before returning
+ :param ipAddr: if specified, try to make the device connect to this
+ specific IP address after rebooting (only works with
+ SUT; if None, we try to determine a reasonable address
+ ourselves)
+ """
+
+ @abstractmethod
+ def installApp(self, appBundlePath, destPath=None):
+ """
+ Installs an application onto the device.
+
+ :param appBundlePath: path to the application bundle on the device
+ :param destPath: destination directory of where application should be
+ installed to (optional)
+ """
+
+ @abstractmethod
+ def uninstallApp(self, appName, installPath=None):
+ """
+ Uninstalls the named application from device and DOES NOT cause a reboot.
+
+ :param appName: the name of the application (e.g org.mozilla.fennec)
+ :param installPath: the path to where the application was installed (optional)
+ """
+
+ @abstractmethod
+ def uninstallAppAndReboot(self, appName, installPath=None):
+ """
+ Uninstalls the named application from device and causes a reboot.
+
+ :param appName: the name of the application (e.g org.mozilla.fennec)
+ :param installPath: the path to where the application was installed (optional)
+ """
+
+ @abstractmethod
+ def updateApp(self, appBundlePath, processName=None, destPath=None,
+ wait=False, ipAddr=None):
+ """
+ Updates the application on the device and reboots.
+
+ :param appBundlePath: path to the application bundle on the device
+ :param processName: used to end the process if the application is
+ currently running (optional)
+ :param destPath: Destination directory to where the application should
+ be installed (optional)
+ :param wait: block on device to come back up before returning
+ :param ipAddr: if specified, try to make the device connect to this
+ specific IP address after rebooting (only works with
+ SUT; if None and wait is True, we try to determine a
+ reasonable address ourselves)
+ """
+
+ @staticmethod
+ def _writePNG(buf, width, height):
+ """
+ Method for writing a PNG from a buffer, used by saveScreenshot on older devices.
+ """
+ # Based on: http://code.activestate.com/recipes/577443-write-a-png-image-in-native-python/
+ width_byte_4 = width * 4
+ raw_data = b"".join(b'\x00' + buf[span:span + width_byte_4]
+ for span in range(0, (height - 1) * width * 4, width_byte_4))
+
+ def png_pack(png_tag, data):
+ chunk_head = png_tag + data
+ return struct.pack("!I", len(data)) \
+ + chunk_head \
+ + struct.pack("!I", 0xFFFFFFFF & zlib.crc32(chunk_head))
+ return b"".join([
+ b'\x89PNG\r\n\x1a\n',
+ png_pack(b'IHDR', struct.pack("!2I5B", width, height, 8, 6, 0, 0, 0)),
+ png_pack(b'IDAT', zlib.compress(raw_data, 9)),
+ png_pack(b'IEND', b'')])
+
+ @abstractmethod
+ def _getRemoteHash(self, filename):
+ """
+ Return the md5 sum of a file on the device.
+ """
+
+ @staticmethod
+ def _getLocalHash(filename):
+ """
+ Return the MD5 sum of a file on the host.
+ """
+ mdsum = hashlib.md5()
+ with open(filename, 'rb') as f:
+ while True:
+ data = f.read(1024)
+ if not data:
+ break
+ mdsum.update(data)
+ return mdsum.hexdigest()
+
+ @staticmethod
+ def _escapedCommandLine(cmd):
+ """
+ Utility function to return escaped and quoted version of command line.
+ """
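+ # Illustrative example (not from the original source):
+ # ['ls', '-l', 'My Documents'] -> ls -l 'My Documents'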
+ quotedCmd = []
+
+ for arg in cmd:
+ arg = arg.replace('&', '\&')
+
+ needsQuoting = False
+ for char in [' ', '(', ')', '"', '&']:
+ if arg.find(char) >= 0:
+ needsQuoting = True
+ break
+ if needsQuoting:
+ arg = '\'%s\'' % arg
+
+ quotedCmd.append(arg)
+
+ return " ".join(quotedCmd)
+
+
+def _pop_last_line(file_obj):
+ """
+ Utility function to get the last line from a file (shared between ADB and
+ SUT device managers). Function also removes it from the file. Intended to
+ strip off the return code from a shell command.
+ """
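+ # Descriptive note (added): the loop below scans backwards from the end of
+ # the file one byte at a time until it finds a newline or reaches the start
+ # of the file; everything after that point is treated as the last line.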
+ bytes_from_end = 1
+ file_obj.seek(0, 2)
+ length = file_obj.tell() + 1
+ while bytes_from_end < length:
+ file_obj.seek((-1) * bytes_from_end, 2)
+ data = file_obj.read()
+
+ if bytes_from_end == length - 1 and len(data) == 0: # no data, return None
+ return None
+
+ if data[0] == '\n' or bytes_from_end == length - 1:
+ # found the last line, which should have the return value
+ if data[0] == '\n':
+ data = data[1:]
+
+ # truncate off the return code line
+ file_obj.truncate(length - bytes_from_end)
+ file_obj.seek(0, 2)
+ file_obj.write('\0')
+
+ return data
+
+ bytes_from_end += 1
+
+ return None
+
+
+class ZeroconfListener(object):
+
+ def __init__(self, hwid, evt):
+ self.hwid = hwid
+ self.evt = evt
+
+ # Format is 'SUTAgent [hwid:015d2bc2825ff206] [ip:10_242_29_221]._sutagent._tcp.local.'
+ def addService(self, zeroconf, type, name):
+ # print "Found _sutagent service broadcast:", name
+ if not name.startswith("SUTAgent"):
+ return
+
+ sutname = name.split('.')[0]
+ m = re.search('\[hwid:([^\]]*)\]', sutname)
+ if m is None:
+ return
+
+ hwid = m.group(1)
+
+ m = re.search('\[ip:([0-9_]*)\]', sutname)
+ if m is None:
+ return
+
+ ip = m.group(1).replace("_", ".")
+
+ if self.hwid == hwid:
+ self.ip = ip
+ self.evt.set()
+
+ def removeService(self, zeroconf, type, name):
+ pass
diff --git a/testing/mozbase/mozdevice/mozdevice/devicemanagerADB.py b/testing/mozbase/mozdevice/mozdevice/devicemanagerADB.py
new file mode 100644
index 000000000..74d0a8d23
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/devicemanagerADB.py
@@ -0,0 +1,893 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import re
+import os
+import tempfile
+import time
+import traceback
+
+from distutils import dir_util
+
+from devicemanager import DeviceManager, DMError
+from mozprocess import ProcessHandler
+import mozfile
+import version_codes
+
+
+class DeviceManagerADB(DeviceManager):
+ """
+ Implementation of DeviceManager interface that uses the Android "adb"
+ utility to communicate with the device. Normally used to communicate
+ with a device that is directly connected with the host machine over a USB
+ port.
+ """
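+
+ # Minimal usage sketch (added for illustration; the adb path, host and port
+ # values are hypothetical):
+ #
+ # dm = DeviceManagerADB(adbPath="/opt/android-sdk/platform-tools/adb")
+ # dm = DeviceManagerADB(host="192.168.1.20", port=5555) # or over tcp/ip
+ # dm.getInfo("os")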
+
+ _haveRootShell = None
+ _haveSu = None
+ _suModifier = None
+ _lsModifier = None
+ _useZip = False
+ _logcatNeedsRoot = False
+ _pollingInterval = 0.01
+ _packageName = None
+ _tempDir = None
+ _adb_version = None
+ _sdk_version = None
+ connected = False
+
+ def __init__(self, host=None, port=5555, retryLimit=5, packageName='fennec',
+ adbPath=None, deviceSerial=None, deviceRoot=None,
+ logLevel=logging.ERROR, autoconnect=True, runAdbAsRoot=False,
+ serverHost=None, serverPort=None, **kwargs):
+ DeviceManager.__init__(self, logLevel=logLevel,
+ deviceRoot=deviceRoot)
+ self.host = host
+ self.port = port
+ self.retryLimit = retryLimit
+
+ self._serverHost = serverHost
+ self._serverPort = serverPort
+
+ # the path to adb, or 'adb' to assume that it's on the PATH
+ self._adbPath = adbPath or 'adb'
+
+ # The serial number of the device to use with adb, used in cases
+ # where multiple devices are being managed by the same adb instance.
+ self._deviceSerial = deviceSerial
+
+ # Some devices do not start adb as root; if allowed, you can use
+ # this to reboot adbd on the device as root automatically
+ self._runAdbAsRoot = runAdbAsRoot
+
+ if packageName == 'fennec':
+ if os.getenv('USER'):
+ self._packageName = 'org.mozilla.fennec_' + os.getenv('USER')
+ else:
+ self._packageName = 'org.mozilla.fennec_'
+ elif packageName:
+ self._packageName = packageName
+
+ # verify that we can run the adb command. can't continue otherwise
+ self._verifyADB()
+
+ if autoconnect:
+ self.connect()
+
+ def connect(self):
+ if not self.connected:
+ # try to connect to the device over tcp/ip if we have a hostname
+ if self.host:
+ self._connectRemoteADB()
+
+ # verify that we can connect to the device. can't continue otherwise
+ self._verifyDevice()
+
+ # Note SDK version
+ try:
+ proc = self._runCmd(["shell", "getprop", "ro.build.version.sdk"],
+ timeout=self.short_timeout)
+ self._sdk_version = int(proc.output[0])
+ except (OSError, ValueError):
+ self._sdk_version = 0
+ self._logger.info("Detected Android sdk %d" % self._sdk_version)
+
+ # Some commands require root to work properly, even with ADB (e.g.
+ # grabbing APKs out of /data). For these cases, we check whether
+ # we're running as root. If that isn't true, check for the
+ # existence of an su binary
+ self._checkForRoot()
+
+ # can we use zip to speed up some file operations? (currently not
+ # required)
+ try:
+ self._verifyZip()
+ except DMError:
+ pass
+
+ def __del__(self):
+ if self.host:
+ self._disconnectRemoteADB()
+
+ def shell(self, cmd, outputfile, env=None, cwd=None, timeout=None, root=False):
+ # FIXME: this function buffers all output of the command into memory,
+ # always. :(
+
+ # If requested to run as root, check that we can actually do that
+ if root:
+ if self._haveRootShell is None and self._haveSu is None:
+ self._checkForRoot()
+ if not self._haveRootShell and not self._haveSu:
+ raise DMError(
+ "Shell command '%s' requested to run as root but root "
+ "is not available on this device. Root your device or "
+ "refactor the test/harness to not require root." %
+ self._escapedCommandLine(cmd))
+
+ # Getting the return code is more complex than you'd think because adb
+ # doesn't actually return the return code from a process, so we have to
+ # capture the output to get it
+ if root and self._haveSu:
+ cmdline = "su %s \"%s\"" % (self._suModifier,
+ self._escapedCommandLine(cmd))
+ else:
+ cmdline = self._escapedCommandLine(cmd)
+ cmdline += "; echo $?"
+
+ # prepend cwd and env to command if necessary
+ if cwd:
+ cmdline = "cd %s; %s" % (cwd, cmdline)
+ if env:
+ envstr = '; '.join(map(lambda x: 'export %s=%s' % (x[0], x[1]), env.iteritems()))
+ cmdline = envstr + "; " + cmdline
+
+ # all output should be in stdout
+ args = [self._adbPath]
+ if self._serverHost is not None:
+ args.extend(['-H', self._serverHost])
+ if self._serverPort is not None:
+ args.extend(['-P', str(self._serverPort)])
+ if self._deviceSerial:
+ args.extend(['-s', self._deviceSerial])
+ args.extend(["shell", cmdline])
+
+ def _timeout():
+ self._logger.error("Timeout exceeded for shell call '%s'" % ' '.join(args))
+
+ self._logger.debug("shell - command: %s" % ' '.join(args))
+ proc = ProcessHandler(args, processOutputLine=self._log, onTimeout=_timeout)
+
+ if not timeout:
+ # We are asserting that all commands will complete in this time unless
+ # otherwise specified
+ timeout = self.default_timeout
+
+ timeout = int(timeout)
+ proc.run(timeout)
+ proc.wait()
+ output = proc.output
+
+ if output:
+ lastline = output[-1]
+ if lastline:
+ m = re.search('([0-9]+)', lastline)
+ if m:
+ return_code = m.group(1)
+ for line in output:
+ outputfile.write(line + '\n')
+ outputfile.seek(-2, 2)
+ outputfile.truncate() # truncate off the return code
+ return int(return_code)
+
+ return None
+
+ def forward(self, local, remote):
+ """
+ Forward socket connections.
+
+ Forward specs are one of:
+ tcp:<port>
+ localabstract:<unix domain socket name>
+ localreserved:<unix domain socket name>
+ localfilesystem:<unix domain socket name>
+ dev:<character device name>
+ jdwp:<process pid> (remote only)
+ """
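+ # Illustrative example (the port numbers are hypothetical):
+ # dm.forward("tcp:2828", "tcp:2828")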
+ if not self._checkCmd(['forward', local, remote], timeout=self.short_timeout) == 0:
+ raise DMError("Failed to forward socket connection.")
+
+ def remove_forward(self, local=None):
+ """
+ Turn off forwarding of socket connection.
+ """
+ cmd = ['forward']
+ if local is None:
+ cmd.extend(['--remove-all'])
+ else:
+ cmd.extend(['--remove', local])
+ if not self._checkCmd(cmd, timeout=self.short_timeout) == 0:
+ raise DMError("Failed to remove connection forwarding.")
+
+ def remount(self):
+ "Remounts the /system partition on the device read-write."
+ return self._checkCmd(['remount'], timeout=self.short_timeout)
+
+ def devices(self):
+ "Return a list of connected devices as (serial, status) tuples."
+ proc = self._runCmd(['devices'])
+ proc.output.pop(0) # ignore first line of output
+ devices = []
+ for line in proc.output:
+ result = re.match('(.*?)\t(.*)', line)
+ if result:
+ devices.append((result.group(1), result.group(2)))
+ return devices
+
+ def _connectRemoteADB(self):
+ self._checkCmd(["connect", self.host + ":" + str(self.port)])
+
+ def _disconnectRemoteADB(self):
+ self._checkCmd(["disconnect", self.host + ":" + str(self.port)])
+
+ def pushFile(self, localname, destname, retryLimit=None, createDir=True):
+ # you might expect us to put the file *in* the directory in this case,
+ # but that would be different behaviour from devicemanagerSUT. Throw
+ # an exception so we have the same behaviour between the two
+ # implementations
+ retryLimit = retryLimit or self.retryLimit
+ if self.dirExists(destname):
+ raise DMError("Attempted to push a file (%s) to a directory (%s)!" %
+ (localname, destname))
+ if not os.access(localname, os.F_OK):
+ raise DMError("File not found: %s" % localname)
+
+ proc = self._runCmd(["push", os.path.realpath(localname), destname],
+ retryLimit=retryLimit)
+ if proc.returncode != 0:
+ raise DMError("Error pushing file %s -> %s; output: %s" %
+ (localname, destname, proc.output))
+
+ def mkDir(self, name):
+ result = self._runCmd(["shell", "mkdir", name], timeout=self.short_timeout).output
+ if len(result) and 'read-only file system' in result[0].lower():
+ raise DMError("Error creating directory: read only file system")
+
+ def pushDir(self, localDir, remoteDir, retryLimit=None, timeout=None):
+ # adb "push" accepts a directory as an argument, but if the directory
+ # contains symbolic links, the links are pushed, rather than the linked
+ # files; we either zip/unzip or re-copy the directory into a temporary
+ # one to get around this limitation
+ retryLimit = retryLimit or self.retryLimit
+ if self._useZip:
+ self.removeDir(remoteDir)
+ self.mkDirs(remoteDir + "/x")
+ try:
+ localZip = tempfile.mktemp() + ".zip"
+ remoteZip = remoteDir + "/adbdmtmp.zip"
+ proc = ProcessHandler(["zip", "-r", localZip, '.'], cwd=localDir,
+ processOutputLine=self._log)
+ proc.run()
+ proc.wait()
+ self.pushFile(localZip, remoteZip, retryLimit=retryLimit, createDir=False)
+ mozfile.remove(localZip)
+ data = self._runCmd(["shell", "unzip", "-o", remoteZip,
+ "-d", remoteDir]).output[0]
+ self._checkCmd(["shell", "rm", remoteZip],
+ retryLimit=retryLimit, timeout=self.short_timeout)
+ if re.search("unzip: exiting", data) or re.search("Operation not permitted", data):
+ raise Exception("unzip failed, or permissions error")
+ except:
+ self._logger.warning(traceback.format_exc())
+ self._logger.warning("zip/unzip failure: falling back to normal push")
+ self._useZip = False
+ self.pushDir(localDir, remoteDir, retryLimit=retryLimit, timeout=timeout)
+ else:
+ localDir = os.path.normpath(localDir)
+ remoteDir = os.path.normpath(remoteDir)
+ copyRequired = False
+ if self._adb_version >= '1.0.36' and \
+ os.path.isdir(localDir) and self.dirExists(remoteDir):
+ # See do_sync_push in
+ # https://android.googlesource.com/platform/system/core/+/master/adb/file_sync_client.cpp
+ # Work around change in behavior in adb 1.0.36 where if
+ # the remote destination directory exists, adb push will
+ # copy the source directory *into* the destination
+ # directory otherwise it will copy the source directory
+ # *onto* the destination directory.
+ #
+ # If the destination directory does exist, push to its
+ # parent directory. If the source and destination leaf
+ # directory names are different, copy the source directory
+ # to a temporary directory with the same leaf name as the
+ # destination so that when we push to the parent, the
+ # source is copied onto the destination directory.
+ localName = os.path.basename(localDir)
+ remoteName = os.path.basename(remoteDir)
+ if localName != remoteName:
+ copyRequired = True
+ tempParent = tempfile.mkdtemp()
+ newLocal = os.path.join(tempParent, remoteName)
+ dir_util.copy_tree(localDir, newLocal)
+ localDir = newLocal
+ remoteDir = '/'.join(remoteDir.rstrip('/').split('/')[:-1])
+ try:
+ self._checkCmd(["push", localDir, remoteDir],
+ retryLimit=retryLimit, timeout=timeout)
+ except:
+ raise
+ finally:
+ if copyRequired:
+ mozfile.remove(tempParent)
+
+ def dirExists(self, remotePath):
+ self._detectLsModifier()
+ data = self._runCmd(["shell", "ls", self._lsModifier, remotePath + '/'],
+ timeout=self.short_timeout).output
+
+ if len(data) == 1:
+ res = data[0]
+ if "Not a directory" in res or "No such file or directory" in res:
+ return False
+ return True
+
+ def fileExists(self, filepath):
+ self._detectLsModifier()
+ data = self._runCmd(["shell", "ls", self._lsModifier, filepath],
+ timeout=self.short_timeout).output
+ if len(data) == 1:
+ foundpath = data[0].decode('utf-8').rstrip()
+ if foundpath == filepath:
+ return True
+ return False
+
+ def removeFile(self, filename):
+ if self.fileExists(filename):
+ self._checkCmd(["shell", "rm", filename], timeout=self.short_timeout)
+
+ def removeDir(self, remoteDir):
+ if self.dirExists(remoteDir):
+ self._checkCmd(["shell", "rm", "-r", remoteDir], timeout=self.short_timeout)
+ else:
+ self.removeFile(remoteDir.strip())
+
+ def moveTree(self, source, destination):
+ self._checkCmd(["shell", "mv", source, destination], timeout=self.short_timeout)
+
+ def copyTree(self, source, destination):
+ self._checkCmd(["shell", "dd", "if=%s" % source, "of=%s" % destination])
+
+ def listFiles(self, rootdir):
+ self._detectLsModifier()
+ data = self._runCmd(["shell", "ls", self._lsModifier, rootdir],
+ timeout=self.short_timeout).output
+ data[:] = [item.rstrip('\r\n') for item in data]
+ if (len(data) == 1):
+ if (data[0] == rootdir):
+ return []
+ if (data[0].find("No such file or directory") != -1):
+ return []
+ if (data[0].find("Not a directory") != -1):
+ return []
+ if (data[0].find("Permission denied") != -1):
+ return []
+ if (data[0].find("opendir failed") != -1):
+ return []
+ if (data[0].find("Device or resource busy") != -1):
+ return []
+ return data
+
+ def getProcessList(self):
+ ret = []
+ p = self._runCmd(["shell", "ps"], timeout=self.short_timeout)
+ if not p or not p.output or len(p.output) < 1:
+ return ret
+ # first line is the headers
+ p.output.pop(0)
+ for proc in p.output:
+ els = proc.split()
+ # We need to figure out if this is "user pid name" or
+ # "pid user vsz stat command"
+ if els[1].isdigit():
+ ret.append(list([int(els[1]), els[len(els) - 1], els[0]]))
+ else:
+ ret.append(list([int(els[0]), els[len(els) - 1], els[1]]))
+ return ret
+
+ def fireProcess(self, appname, failIfRunning=False):
+ """
+ Starts a process
+
+ returns: pid
+
+ DEPRECATED: Use shell() or launchApplication() for new code
+ """
+ # strip out env vars
+ parts = appname.split('"')
+ if (len(parts) > 2):
+ parts = parts[2:]
+ return self.launchProcess(parts, failIfRunning)
+
+ def launchProcess(self, cmd, outputFile="process.txt", cwd='', env='', failIfRunning=False):
+ """
+ Launches a process, redirecting output to standard out
+
+ WARNING: Does not work how you expect on Android! The application's
+ own output will be flushed elsewhere.
+
+ DEPRECATED: Use shell() or launchApplication() for new code
+ """
+ if cmd[0] == "am":
+ self._checkCmd(["shell"] + cmd)
+ return outputFile
+
+ acmd = ["-W"]
+ cmd = ' '.join(cmd).strip()
+ i = cmd.find(" ")
+ # SUT identifies the URL by looking for :\\ -- another strategy to consider
+ re_url = re.compile('^(http|file|chrome|about).*')
+ last = cmd.rfind(" ")
+ uri = ""
+ args = ""
+ if re_url.match(cmd[last:].strip()):
+ args = cmd[i:last].strip()
+ uri = cmd[last:].strip()
+ else:
+ args = cmd[i:].strip()
+ acmd.append("-n")
+ acmd.append(cmd[0:i] + "/org.mozilla.gecko.BrowserApp")
+ if args != "":
+ acmd.append("--es")
+ acmd.append("args")
+ acmd.append(args)
+ if env != '' and env is not None:
+ envCnt = 0
+ # env is expected to be a dict of environment variables
+ for envkey, envval in env.iteritems():
+ acmd.append("--es")
+ acmd.append("env" + str(envCnt))
+ acmd.append(envkey + "=" + envval)
+ envCnt += 1
+ if uri != "":
+ acmd.append("-d")
+ acmd.append(uri)
+
+ acmd = ["shell", ' '.join(map(lambda x: '"' + x + '"', ["am", "start"] + acmd))]
+ self._logger.info(acmd)
+ self._checkCmd(acmd)
+ return outputFile
+
+ def killProcess(self, appname, sig=None):
+ shell_args = ["shell"]
+ if self._sdk_version >= version_codes.N:
+ # Bug 1334613 - force use of root
+ if self._haveRootShell is None and self._haveSu is None:
+ self._checkForRoot()
+ if not self._haveRootShell and not self._haveSu:
+ raise DMError(
+ "killProcess '%s' requested to run as root but root "
+ "is not available on this device. Root your device or "
+ "refactor the test/harness to not require root." %
+ appname)
+ if not self._haveRootShell:
+ shell_args.extend(["su", self._suModifier])
+
+ procs = self.getProcessList()
+ for (pid, name, user) in procs:
+ if name == appname:
+ args = list(shell_args)
+ args.append("kill")
+ if sig:
+ args.append("-%d" % sig)
+ args.append(str(pid))
+ p = self._runCmd(args, timeout=self.short_timeout)
+ if p.returncode != 0 and len(p.output) > 0 and \
+ 'No such process' not in p.output[0]:
+ raise DMError("Error killing process "
+ "'%s': %s" % (appname, p.output))
+
+ def _runPull(self, remoteFile, localFile):
+ """
+ Pulls remoteFile from device to host
+ """
+ try:
+ self._runCmd(["pull", remoteFile, localFile])
+ except (OSError, ValueError):
+ raise DMError("Error pulling remote file '%s' to '%s'" % (remoteFile, localFile))
+
+ def pullFile(self, remoteFile, offset=None, length=None):
+ # TODO: add debug flags and allow for printing stdout
+ with mozfile.NamedTemporaryFile() as tf:
+ self._runPull(remoteFile, tf.name)
+ # we need to reopen the file to get the written contents
+ with open(tf.name) as tf2:
+ # ADB pull does not support offset and length, but we can
+ # instead read only the requested portion of the local file
+ if offset is not None and length is not None:
+ tf2.seek(offset)
+ return tf2.read(length)
+ elif offset is not None:
+ tf2.seek(offset)
+ return tf2.read()
+ else:
+ return tf2.read()
+
+ def getFile(self, remoteFile, localFile):
+ self._runPull(remoteFile, localFile)
+
+ def getDirectory(self, remoteDir, localDir, checkDir=True):
+ localDir = os.path.normpath(localDir)
+ remoteDir = os.path.normpath(remoteDir)
+ copyRequired = False
+ originalLocal = localDir
+ if self._adb_version >= '1.0.36' and \
+ os.path.isdir(localDir) and self.dirExists(remoteDir):
+ # See do_sync_pull in
+ # https://android.googlesource.com/platform/system/core/+/master/adb/file_sync_client.cpp
+ # Work around change in behavior in adb 1.0.36 where if
+ # the local destination directory exists, adb pull will
+ # copy the source directory *into* the destination
+ # directory otherwise it will copy the source directory
+ # *onto* the destination directory.
+ #
+ # If the destination directory does exist, pull to its
+ # parent directory. If the source and destination leaf
+ # directory names are different, pull the source directory
+ # into a temporary directory and then copy the temporary
+ # directory onto the destination.
+ localName = os.path.basename(localDir)
+ remoteName = os.path.basename(remoteDir)
+ if localName != remoteName:
+ copyRequired = True
+ tempParent = tempfile.mkdtemp()
+ localDir = os.path.join(tempParent, remoteName)
+ else:
+ localDir = '/'.join(localDir.rstrip('/').split('/')[:-1])
+ self._runCmd(["pull", remoteDir, localDir]).wait()
+ if copyRequired:
+ dir_util.copy_tree(localDir, originalLocal)
+ mozfile.remove(tempParent)
+
+ def validateFile(self, remoteFile, localFile):
+ md5Remote = self._getRemoteHash(remoteFile)
+ md5Local = self._getLocalHash(localFile)
+ if md5Remote is None or md5Local is None:
+ return None
+ return md5Remote == md5Local
+
+ def _getRemoteHash(self, remoteFile):
+ """
+ Return the md5 sum of a file on the device
+ """
+ with tempfile.NamedTemporaryFile() as f:
+ self._runPull(remoteFile, f.name)
+
+ return self._getLocalHash(f.name)
+
+ def _setupDeviceRoot(self, deviceRoot):
+ # user-specified device root, create it and return it
+ if deviceRoot:
+ self.mkDir(deviceRoot)
+ return deviceRoot
+
+ # we must determine the device root ourselves
+ paths = [('/storage/sdcard0', 'tests'),
+ ('/storage/sdcard1', 'tests'),
+ ('/storage/sdcard', 'tests'),
+ ('/mnt/sdcard', 'tests'),
+ ('/sdcard', 'tests'),
+ ('/data/local', 'tests')]
+ for (basePath, subPath) in paths:
+ if self.dirExists(basePath):
+ root = os.path.join(basePath, subPath)
+ try:
+ self.mkDir(root)
+ return root
+ except:
+ pass
+
+ raise DMError("Unable to set up device root using paths: [%s]"
+ % ", ".join(["'%s'" % os.path.join(b, s) for b, s in paths]))
+
+ def getTempDir(self):
+ # Cache result to speed up operations depending
+ # on the temporary directory.
+ if not self._tempDir:
+ self._tempDir = "%s/tmp" % self.deviceRoot
+ self.mkDir(self._tempDir)
+
+ return self._tempDir
+
+ def reboot(self, wait=False, **kwargs):
+ self._checkCmd(["reboot"])
+ if wait:
+ self._checkCmd(["wait-for-device"])
+ if self._runAdbAsRoot:
+ self._adb_root()
+ self._checkCmd(["shell", "ls", "/sbin"], timeout=self.short_timeout)
+
+ def updateApp(self, appBundlePath, **kwargs):
+ return self._runCmd(["install", "-r", appBundlePath]).output
+
+ def getCurrentTime(self):
+ timestr = str(self._runCmd(["shell", "date", "+%s"], timeout=self.short_timeout).output[0])
+ if (not timestr or not timestr.isdigit()):
+ raise DMError("Unable to get current time using date (got: '%s')" % timestr)
+ return int(timestr) * 1000
+
+ def getInfo(self, directive=None):
+ directive = directive or "all"
+ ret = {}
+ if directive == "id" or directive == "all":
+ ret["id"] = self._runCmd(["get-serialno"], timeout=self.short_timeout).output[0]
+ if directive == "os" or directive == "all":
+ ret["os"] = self.shellCheckOutput(
+ ["getprop", "ro.build.display.id"], timeout=self.short_timeout)
+ if directive == "uptime" or directive == "all":
+ uptime = self.shellCheckOutput(["uptime"], timeout=self.short_timeout)
+ if not uptime:
+ raise DMError("error getting uptime")
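+ # The regex below expects toolbox-style output such as
+ # "up time: 01:23:45, idle time: ..." (illustrative sample)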
+ m = re.match("up time: ((\d+) days, )*(\d{2}):(\d{2}):(\d{2})", uptime)
+ if m:
+ uptime = "%d days %d hours %d minutes %d seconds" % tuple(
+ [int(g or 0) for g in m.groups()[1:]])
+ ret["uptime"] = uptime
+ if directive == "process" or directive == "all":
+ data = self.shellCheckOutput(["ps"], timeout=self.short_timeout)
+ ret["process"] = data.split('\n')
+ if directive == "systime" or directive == "all":
+ ret["systime"] = self.shellCheckOutput(["date"], timeout=self.short_timeout)
+ if directive == "memtotal" or directive == "all":
+ meminfo = {}
+ for line in self.pullFile("/proc/meminfo").splitlines():
+ key, value = line.split(":")
+ meminfo[key] = value.strip()
+ ret["memtotal"] = meminfo["MemTotal"]
+ if directive == "disk" or directive == "all":
+ data = self.shellCheckOutput(
+ ["df", "/data", "/system", "/sdcard"], timeout=self.short_timeout)
+ ret["disk"] = data.split('\n')
+ self._logger.debug("getInfo: %s" % ret)
+ return ret
+
+ def uninstallApp(self, appName, installPath=None):
+ status = self._runCmd(["uninstall", appName]).output[0].strip()
+ if status != 'Success':
+ raise DMError("uninstall failed for %s. Got: %s" % (appName, status))
+
+ def uninstallAppAndReboot(self, appName, installPath=None):
+ self.uninstallApp(appName)
+ self.reboot()
+
+ def _runCmd(self, args, timeout=None, retryLimit=None):
+ """
+ Runs a command using adb
+ If timeout is specified, the process is killed after <timeout> seconds.
+
+ returns: instance of ProcessHandler
+ """
+ retryLimit = retryLimit or self.retryLimit
+ finalArgs = [self._adbPath]
+ if self._serverHost is not None:
+ finalArgs.extend(['-H', self._serverHost])
+ if self._serverPort is not None:
+ finalArgs.extend(['-P', str(self._serverPort)])
+ if self._deviceSerial:
+ finalArgs.extend(['-s', self._deviceSerial])
+ finalArgs.extend(args)
+ self._logger.debug("_runCmd - command: %s" % ' '.join(finalArgs))
+ if not timeout:
+ timeout = self.default_timeout
+
+ def _timeout():
+ self._logger.error("Timeout exceeded for _runCmd call '%s'" % ' '.join(finalArgs))
+
+ retries = 0
+ while retries < retryLimit:
+ proc = ProcessHandler(finalArgs, storeOutput=True,
+ processOutputLine=self._log, onTimeout=_timeout)
+ proc.run(timeout=timeout)
+ proc.returncode = proc.wait()
+ if proc.returncode is None:
+ proc.kill()
+ retries += 1
+ else:
+ return proc
+
+ # timeout is specified in seconds, and if no timeout is given,
+ # we will run until we hit the default_timeout specified in the __init__
+ def _checkCmd(self, args, timeout=None, retryLimit=None):
+ """
+ Runs a command using adb and waits for the command to finish.
+ If timeout is specified, the process is killed after <timeout> seconds.
+
+ returns: returncode from process
+ """
+ retryLimit = retryLimit or self.retryLimit
+ finalArgs = [self._adbPath]
+ if self._serverHost is not None:
+ finalArgs.extend(['-H', self._serverHost])
+ if self._serverPort is not None:
+ finalArgs.extend(['-P', str(self._serverPort)])
+ if self._deviceSerial:
+ finalArgs.extend(['-s', self._deviceSerial])
+ finalArgs.extend(args)
+ self._logger.debug("_checkCmd - command: %s" % ' '.join(finalArgs))
+ if not timeout:
+ # We are asserting that all commands will complete in this
+ # time unless otherwise specified
+ timeout = self.default_timeout
+
+ def _timeout():
+ self._logger.error("Timeout exceeded for _checkCmd call '%s'" % ' '.join(finalArgs))
+
+ timeout = int(timeout)
+ retries = 0
+ while retries < retryLimit:
+ proc = ProcessHandler(finalArgs, processOutputLine=self._log, onTimeout=_timeout)
+ proc.run(timeout=timeout)
+ ret_code = proc.wait()
+ if ret_code is None:
+ proc.kill()
+ retries += 1
+ else:
+ return ret_code
+
+ raise DMError("Timeout exceeded for _checkCmd call after %d retries." % retries)
+
+ def chmodDir(self, remoteDir, mask="777"):
+ if (self.dirExists(remoteDir)):
+ if '/sdcard' in remoteDir:
+ self._logger.debug("chmod %s -- skipped (/sdcard)" % remoteDir)
+ else:
+ files = self.listFiles(remoteDir.strip())
+ for f in files:
+ remoteEntry = remoteDir.strip() + "/" + f.strip()
+ if (self.dirExists(remoteEntry)):
+ self.chmodDir(remoteEntry)
+ else:
+ self._checkCmd(["shell", "chmod", mask, remoteEntry],
+ timeout=self.short_timeout)
+ self._logger.info("chmod %s" % remoteEntry)
+ self._checkCmd(["shell", "chmod", mask, remoteDir], timeout=self.short_timeout)
+ self._logger.debug("chmod %s" % remoteDir)
+ else:
+ self._checkCmd(["shell", "chmod", mask, remoteDir.strip()], timeout=self.short_timeout)
+ self._logger.debug("chmod %s" % remoteDir.strip())
+
+ def _verifyADB(self):
+ """
+ Check to see if adb itself can be executed.
+ """
+ if self._adbPath != 'adb':
+ if not os.access(self._adbPath, os.X_OK):
+ raise DMError("invalid adb path, or adb not executable: %s" % self._adbPath)
+
+ try:
+ re_version = re.compile(r'Android Debug Bridge version (.*)')
+ proc = self._runCmd(["version"], timeout=self.short_timeout)
+ self._adb_version = re_version.match(proc.output[0]).group(1)
+ self._logger.info("Detected adb %s" % self._adb_version)
+ except os.error as err:
+ raise DMError(
+ "unable to execute ADB (%s): ensure Android SDK is installed "
+ "and adb is in your $PATH" % err)
+
+ def _verifyDevice(self):
+ # If there is a device serial number, see if adb is connected to it
+ if self._deviceSerial:
+ deviceStatus = None
+ for line in self._runCmd(["devices"]).output:
+ m = re.match('(.+)?\s+(.+)$', line)
+ if m:
+ if self._deviceSerial == m.group(1):
+ deviceStatus = m.group(2)
+ if deviceStatus is None:
+ raise DMError("device not found: %s" % self._deviceSerial)
+ elif deviceStatus != "device":
+ raise DMError("bad status for device %s: %s" % (self._deviceSerial, deviceStatus))
+
+ # Check to see if we can connect to device and run a simple command
+ if not self._checkCmd(["shell", "echo"], timeout=self.short_timeout) == 0:
+ raise DMError("unable to connect to device")
+
+ def _checkForRoot(self):
+ self._haveRootShell = False
+ self._haveSu = False
+ # If requested to attempt to run adbd as root, do so before
+ # checking whether adbd is running as root.
+ if self._runAdbAsRoot:
+ self._adb_root()
+
+ # Check whether we _are_ root by default (some development boards work
+ # this way, this is also the result of some relatively rare rooting
+ # techniques)
+ proc = self._runCmd(["shell", "id"], timeout=self.short_timeout)
+ if proc.output and 'uid=0(root)' in proc.output[0]:
+ self._haveRootShell = True
+ # if this returns true, we don't care about su
+ return
+
+ # if root shell is not available, check if 'su' can be used to gain
+ # root
+ def su_id(su_modifier, timeout):
+ proc = self._runCmd(["shell", "su", su_modifier, "id"],
+ timeout=timeout)
+
+ # wait for response for maximum of 15 seconds, in case su
+ # prompts for a password or triggers the Android SuperUser
+ # prompt
+ start_time = time.time()
+ retcode = None
+ while (time.time() - start_time) <= 15 and retcode is None:
+ retcode = proc.poll()
+ if retcode is None: # still not terminated, kill
+ proc.kill()
+
+ if proc.output and 'uid=0(root)' in proc.output[0]:
+ return True
+ return False
+
+ if su_id('0', self.short_timeout):
+ self._haveSu = True
+ self._suModifier = '0'
+ elif su_id('-c', self.short_timeout):
+ self._haveSu = True
+ self._suModifier = '-c'
+
+ def _isUnzipAvailable(self):
+ data = self._runCmd(["shell", "unzip"]).output
+ for line in data:
+ if (re.search('Usage', line)):
+ return True
+ return False
+
+ def _isLocalZipAvailable(self):
+ def _noOutput(line):
+ # suppress output from zip ProcessHandler
+ pass
+ try:
+ proc = ProcessHandler(["zip", "-?"], storeOutput=False, processOutputLine=_noOutput)
+ proc.run()
+ proc.wait()
+ except:
+ return False
+ return True
+
+ def _verifyZip(self):
+ # If "zip" can be run locally, and "unzip" can be run remotely, then pushDir
+ # can use these to push just one file per directory -- a significant
+ # optimization for large directories.
+ self._useZip = False
+ if (self._isUnzipAvailable() and self._isLocalZipAvailable()):
+ self._logger.info("will use zip to push directories")
+ self._useZip = True
+ else:
+ raise DMError("zip not available")
+
+ def _adb_root(self):
+ """ Some devices require us to reboot adbd as root.
+ This function takes care of it.
+ """
+ if self.processInfo("adbd")[2] != "root":
+ self._checkCmd(["root"])
+ self._checkCmd(["wait-for-device"])
+ if self.processInfo("adbd")[2] != "root":
+ raise DMError("We tried rebooting adbd as root, however, it failed.")
+
+ def _detectLsModifier(self):
+ if self._lsModifier is None:
+ # Check whether "ls -1A" is supported, in order to get one
+ # file per line; fall back to "-a" otherwise.
+ output = self._runCmd(["shell", "ls", "-1A", "/"],
+ timeout=self.short_timeout).output
+ output = ' '.join(output)
+ if 'error: device not found' in output:
+ raise DMError(output)
+ if "Unknown option '-1'. Aborting." in output:
+ self._lsModifier = "-a"
+ elif "No such file or directory" in output:
+ self._lsModifier = "-a"
+ else:
+ self._lsModifier = "-1A"
diff --git a/testing/mozbase/mozdevice/mozdevice/devicemanagerSUT.py b/testing/mozbase/mozdevice/mozdevice/devicemanagerSUT.py
new file mode 100644
index 000000000..7816a3fdd
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/devicemanagerSUT.py
@@ -0,0 +1,975 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import datetime
+import logging
+import moznetwork
+import select
+import socket
+import time
+import os
+import re
+import posixpath
+import subprocess
+import StringIO
+from devicemanager import DeviceManager, DMError, _pop_last_line
+import errno
+from distutils.version import StrictVersion
+
+
+class DeviceManagerSUT(DeviceManager):
+ """
+ Implementation of DeviceManager interface that speaks to a device over
+ TCP/IP using the "system under test" protocol. A software agent such as
+ Negatus (http://github.com/mozilla/Negatus) or the Mozilla Android SUTAgent
+ app must be present and listening for connections for this to work.
+ """
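+
+ # Minimal usage sketch (added for illustration; the host address is
+ # hypothetical, and a SUT agent must already be listening on the device):
+ #
+ # dm = DeviceManagerSUT("192.168.1.20")
+ # print(dm.getInfo("os"))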
+
+ _base_prompt = '$>'
+ _base_prompt_re = '\$\>'
+ _prompt_sep = '\x00'
+ _prompt_regex = '.*(' + _base_prompt_re + _prompt_sep + ')'
+ _agentErrorRE = re.compile('^##AGENT-WARNING##\ ?(.*)')
+
+ reboot_timeout = 600
+ reboot_settling_time = 60
+
+ def __init__(self, host, port=20701, retryLimit=5, deviceRoot=None,
+ logLevel=logging.ERROR, **kwargs):
+ DeviceManager.__init__(self, logLevel=logLevel,
+ deviceRoot=deviceRoot)
+ self.host = host
+ self.port = port
+ self.retryLimit = retryLimit
+ self._sock = None
+ self._everConnected = False
+
+ # Get version
+ verstring = self._runCmds([{'cmd': 'ver'}])
+ ver_re = re.match('(\S+) Version (\S+)', verstring)
+ self.agentProductName = ver_re.group(1)
+ self.agentVersion = ver_re.group(2)
+
+ def _cmdNeedsResponse(self, cmd):
+ """ Not all commands need a response from the agent:
+ * rebt obviously doesn't get a response
+ * uninstall performs a reboot to ensure starting in a clean state and
+ so also doesn't look for a response
+ """
+ noResponseCmds = [re.compile('^rebt'),
+ re.compile('^uninst .*$'),
+ re.compile('^pull .*$')]
+
+ for c in noResponseCmds:
+ if (c.match(cmd)):
+ return False
+
+ # If the command is not in our list, then it gets a response
+ return True
+
+ def _stripPrompt(self, data):
+ """
+ take a data blob and strip instances of the prompt '$>\x00'
+ """
+ promptre = re.compile(self._prompt_regex + '.*')
+ retVal = []
+ lines = data.split('\n')
+ for line in lines:
+ foundPrompt = False
+ try:
+ while (promptre.match(line)):
+ foundPrompt = True
+ pieces = line.split(self._prompt_sep)
+ index = pieces.index('$>')
+ pieces.pop(index)
+ line = self._prompt_sep.join(pieces)
+ except(ValueError):
+ pass
+
+ # we don't want to append lines that are blank after stripping the
+ # prompt (those are basically "prompts")
+ if not foundPrompt or line:
+ retVal.append(line)
+
+ return '\n'.join(retVal)
+
+ def _shouldCmdCloseSocket(self, cmd):
+ """
+ Some commands need to close the socket after they are sent:
+ * rebt
+ * uninst
+ * quit
+ """
+ socketClosingCmds = [re.compile('^quit.*'),
+ re.compile('^rebt.*'),
+ re.compile('^uninst .*$')]
+
+ for c in socketClosingCmds:
+ if (c.match(cmd)):
+ return True
+ return False
+
+ def _sendCmds(self, cmdlist, outputfile, timeout=None, retryLimit=None):
+ """
+ Wrapper for _doCmds that loops up to retryLimit iterations
+ """
+ # this allows us to move the retry logic outside of the _doCmds() to make it
+ # easier for debugging in the future.
+ # note that since cmdlist is a list of commands, they will all be retried if
+ # one fails. this is necessary in particular for pushFile(), where we don't want
+ # to accidentally send extra data if a failure occurs during data transmission.
+
+ retryLimit = retryLimit or self.retryLimit
+ retries = 0
+ while retries < retryLimit:
+ try:
+ self._doCmds(cmdlist, outputfile, timeout)
+ return
+ except DMError as err:
+ # re-raise error if it's fatal (i.e. the device got the command but
+ # couldn't execute it). retry otherwise
+ if err.fatal:
+ raise err
+ self._logger.debug(err)
+ retries += 1
+ # if we lost the connection or failed to establish one, wait a bit
+ if retries < retryLimit and not self._sock:
+ sleep_time = 5 * retries
+ self._logger.info('Could not connect; sleeping for %d seconds.' % sleep_time)
+ time.sleep(sleep_time)
+
+ raise DMError("Remote Device Error: unable to connect to %s after %s attempts" %
+ (self.host, retryLimit))
+
+ def _runCmds(self, cmdlist, timeout=None, retryLimit=None):
+ """
+ Similar to _sendCmds, but just returns any output as a string instead of
+ writing to a file
+ """
+ retryLimit = retryLimit or self.retryLimit
+ outputfile = StringIO.StringIO()
+ self._sendCmds(cmdlist, outputfile, timeout, retryLimit=retryLimit)
+ outputfile.seek(0)
+ return outputfile.read()
+
+ def _doCmds(self, cmdlist, outputfile, timeout):
+ promptre = re.compile(self._prompt_regex + '$')
+ shouldCloseSocket = False
+
+ if not timeout:
+ # We are asserting that all commands will complete in this time unless
+ # otherwise specified
+ timeout = self.default_timeout
+
+ if not self._sock:
+ try:
+ if self._everConnected:
+ self._logger.info("reconnecting socket")
+ self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ except socket.error as msg:
+ self._sock = None
+ raise DMError("Automation Error: unable to create socket: " + str(msg))
+
+ try:
+ self._sock.settimeout(float(timeout))
+ self._sock.connect((self.host, int(self.port)))
+ self._everConnected = True
+ except socket.error as msg:
+ self._sock = None
+ raise DMError("Remote Device Error: Unable to connect socket: " + str(msg))
+
+ # consume prompt
+ try:
+ self._sock.recv(1024)
+ except socket.error as msg:
+ self._sock.close()
+ self._sock = None
+ raise DMError(
+ "Remote Device Error: Did not get prompt after connecting: " + str(msg),
+ fatal=True)
+
+ # future recv() timeouts are handled by select() calls
+ self._sock.settimeout(None)
+
+ for cmd in cmdlist:
+ cmdline = '%s\r\n' % cmd['cmd']
+
+ try:
+ sent = self._sock.send(cmdline)
+ if sent != len(cmdline):
+ raise DMError("Remote Device Error: our cmd was %s bytes and we "
+ "only sent %s" % (len(cmdline), sent))
+ if cmd.get('data'):
+ totalsent = 0
+ while totalsent < len(cmd['data']):
+ sent = self._sock.send(cmd['data'][totalsent:])
+ self._logger.debug("sent %s bytes of data payload" % sent)
+ if sent == 0:
+ raise DMError("Socket connection broken when sending data")
+ totalsent += sent
+
+ self._logger.debug("sent cmd: %s" % cmd['cmd'])
+ except socket.error as msg:
+ self._sock.close()
+ self._sock = None
+ self._logger.error("Remote Device Error: Error sending data"
+ " to socket. cmd=%s; err=%s" % (cmd['cmd'], msg))
+ return False
+
+ # Check if the command should close the socket
+ shouldCloseSocket = self._shouldCmdCloseSocket(cmd['cmd'])
+
+ # Handle responses from commands
+ if self._cmdNeedsResponse(cmd['cmd']):
+ foundPrompt = False
+ data = ""
+ timer = 0
+ select_timeout = 1
+ commandFailed = False
+
+ while not foundPrompt:
+ socketClosed = False
+ errStr = ''
+ temp = ''
+ self._logger.debug("recv'ing...")
+
+ # Get our response
+ try:
+ # Wait up to a second for socket to become ready for reading...
+ if select.select([self._sock], [], [], select_timeout)[0]:
+ temp = self._sock.recv(1024)
+ self._logger.debug(u"response: %s" % temp.decode('utf8', 'replace'))
+ timer = 0
+ if not temp:
+ socketClosed = True
+ errStr = 'connection closed'
+ timer += select_timeout
+ if timer > timeout:
+ self._sock.close()
+ self._sock = None
+ raise DMError("Automation Error: Timeout in command %s" %
+ cmd['cmd'], fatal=True)
+ except socket.error as err:
+ socketClosed = True
+ errStr = str(err)
+ # This error shows up when we have our tegra rebooted.
+ if err[0] == errno.ECONNRESET:
+ errStr += ' - possible reboot'
+
+ if socketClosed:
+ self._sock.close()
+ self._sock = None
+ raise DMError(
+ "Automation Error: Error receiving data from socket. "
+ "cmd=%s; err=%s" % (cmd, errStr))
+
+ data += temp
+
+ # If something goes wrong in the agent it will send back a string that
+ # starts with '##AGENT-WARNING##'
+ if not commandFailed:
+ errorMatch = self._agentErrorRE.match(data)
+ if errorMatch:
+ # We still need to consume the prompt, so raise an error after
+ # draining the rest of the buffer.
+ commandFailed = True
+
+ for line in data.splitlines():
+ if promptre.match(line):
+ foundPrompt = True
+ data = self._stripPrompt(data)
+ break
+
+ # periodically flush data to output file to make sure it doesn't get
+ # too big/unwieldy
+ if len(data) > 1024:
+ outputfile.write(data[0:1024])
+ data = data[1024:]
+
+ if commandFailed:
+ raise DMError("Automation Error: Error processing command '%s'; err='%s'" %
+ (cmd['cmd'], errorMatch.group(1)), fatal=True)
+
+ # Write any remaining data to outputfile
+ outputfile.write(data)
+
+ if shouldCloseSocket:
+ try:
+ self._sock.close()
+ self._sock = None
+ except:
+ self._sock = None
+ raise DMError("Automation Error: Error closing socket")
+
+ def _setupDeviceRoot(self, deviceRoot):
+ if not deviceRoot:
+ deviceRoot = "%s/tests" % self._runCmds(
+ [{'cmd': 'testroot'}]).strip()
+ self.mkDir(deviceRoot)
+
+ return deviceRoot
+
+ def shell(self, cmd, outputfile, env=None, cwd=None, timeout=None, root=False):
+ cmdline = self._escapedCommandLine(cmd)
+ if env:
+ cmdline = '%s %s' % (self._formatEnvString(env), cmdline)
+
+ # execcwd/execcwdsu currently unsupported in Negatus; see bug 824127.
+ if cwd and self.agentProductName == 'SUTAgentNegatus':
+ raise DMError("Negatus does not support execcwd/execcwdsu")
+
+ haveExecSu = (self.agentProductName == 'SUTAgentNegatus' or
+ StrictVersion(self.agentVersion) >= StrictVersion('1.13'))
+
+ # Depending on agent version we send one of the following commands here:
+ # * exec (run as normal user)
+ # * execsu (run as privileged user)
+ # * execcwd (run as normal user from specified directory)
+ # * execcwdsu (run as privileged user from specified directory)
+
+ cmd = "exec"
+ if cwd:
+ cmd += "cwd"
+ if root and haveExecSu:
+ cmd += "su"
+
+ if cwd:
+ self._sendCmds([{'cmd': '%s %s %s' % (cmd, cwd, cmdline)}], outputfile, timeout)
+ else:
+ if (not root) or haveExecSu:
+ self._sendCmds([{'cmd': '%s %s' % (cmd, cmdline)}], outputfile, timeout)
+ else:
+ # need to manually inject su -c for backwards compatibility (this may
+ # not work on ICS or above!!)
+ # (FIXME: this backwards compatibility code is really ugly and should
+ # be deprecated at some point in the future)
+ self._sendCmds([{'cmd': '%s su -c "%s"' % (cmd, cmdline)}], outputfile,
+ timeout)
+
+ # dig through the output to get the return code
+ lastline = _pop_last_line(outputfile)
+ if lastline:
+ m = re.search('return code \[([0-9]+)\]', lastline)
+ if m:
+ return int(m.group(1))
+
+ # woops, we couldn't find an end of line/return value
+ raise DMError(
+ "Automation Error: Error finding end of line/return value when running '%s'" % cmdline)
+
+ def pushFile(self, localname, destname, retryLimit=None, createDir=True):
+ retryLimit = retryLimit or self.retryLimit
+ if createDir:
+ self.mkDirs(destname)
+
+ try:
+ filesize = os.path.getsize(localname)
+ with open(localname, 'rb') as f:
+ remoteHash = self._runCmds([{'cmd': 'push ' + destname + ' ' + str(filesize),
+ 'data': f.read()}], retryLimit=retryLimit).strip()
+ except OSError:
+ raise DMError("DeviceManager: Error reading file to push")
+
+ self._logger.debug("push returned: %s" % remoteHash)
+
+ localHash = self._getLocalHash(localname)
+
+ if localHash != remoteHash:
+ raise DMError("Automation Error: Push File failed to Validate! (localhash: %s, "
+ "remotehash: %s)" % (localHash, remoteHash))
+
+ def mkDir(self, name):
+ if not self.dirExists(name):
+ self._runCmds([{'cmd': 'mkdr ' + name}])
+
+ def pushDir(self, localDir, remoteDir, retryLimit=None, timeout=None):
+ retryLimit = retryLimit or self.retryLimit
+ self._logger.info("pushing directory: %s to %s" % (localDir, remoteDir))
+
+ existentDirectories = []
+ for root, dirs, files in os.walk(localDir, followlinks=True):
+ _, subpath = root.split(localDir)
+ subpath = subpath.lstrip('/')
+ remoteRoot = posixpath.join(remoteDir, subpath)
+ for f in files:
+ remoteName = posixpath.join(remoteRoot, f)
+
+ if subpath == "":
+ remoteRoot = remoteDir
+
+ parent = os.path.dirname(remoteName)
+ if parent not in existentDirectories:
+ self.mkDirs(remoteName)
+ existentDirectories.append(parent)
+
+ self.pushFile(os.path.join(root, f), remoteName,
+ retryLimit=retryLimit, createDir=False)
+
+ def dirExists(self, remotePath):
+ ret = self._runCmds([{'cmd': 'isdir ' + remotePath}]).strip()
+ if not ret:
+ raise DMError('Automation Error: DeviceManager isdir returned null')
+
+ return ret == 'TRUE'
+
+ def fileExists(self, filepath):
+ # Because we always have / style paths we make this a lot easier with some
+ # assumptions
+ filepath = posixpath.normpath(filepath)
+ # / should always exist but we can use this to check for things like
+ # having access to the filesystem
+ if filepath == '/':
+ return self.dirExists(filepath)
+ (containingpath, filename) = posixpath.split(filepath)
+ return filename in self.listFiles(containingpath)
+
+ def listFiles(self, rootdir):
+ rootdir = posixpath.normpath(rootdir)
+ if not self.dirExists(rootdir):
+ return []
+ data = self._runCmds([{'cmd': 'cd ' + rootdir}, {'cmd': 'ls'}])
+
+ files = filter(lambda x: x, data.splitlines())
+ if len(files) == 1 and files[0] == '<empty>':
+ # special case on the agent: empty directories return just the
+ # string "<empty>"
+ return []
+ return files
+
+ def removeFile(self, filename):
+ self._logger.info("removing file: " + filename)
+ if self.fileExists(filename):
+ self._runCmds([{'cmd': 'rm ' + filename}])
+
+ def removeDir(self, remoteDir):
+ if self.dirExists(remoteDir):
+ self._runCmds([{'cmd': 'rmdr ' + remoteDir}])
+
+ def moveTree(self, source, destination):
+ self._runCmds([{'cmd': 'mv %s %s' % (source, destination)}])
+
+ def copyTree(self, source, destination):
+ self._runCmds([{'cmd': 'dd if=%s of=%s' % (source, destination)}])
+
+ def getProcessList(self):
+ data = self._runCmds([{'cmd': 'ps'}])
+
+ processTuples = []
+ for line in data.splitlines():
+ if line:
+ pidproc = line.strip().split()
+ try:
+ if (len(pidproc) == 2):
+ processTuples += [[pidproc[0], pidproc[1]]]
+ elif (len(pidproc) == 3):
+ # android returns <userID> <procID> <procName>
+ processTuples += [[int(pidproc[1]), pidproc[2], int(pidproc[0])]]
+ else:
+ # unexpected format
+ raise ValueError
+ except ValueError:
+ self._logger.error("Unable to parse process list (bug 805969)")
+ self._logger.error("Line: %s\nFull output of process list:\n%s" % (line, data))
+ raise DMError("Invalid process line: %s" % line)
+
+ return processTuples
+
+ def fireProcess(self, appname, failIfRunning=False, maxWaitTime=30):
+ """
+ Starts a process
+
+ returns: pid
+
+ DEPRECATED: Use shell() or launchApplication() for new code
+ """
+ if not appname:
+ raise DMError("Automation Error: fireProcess called with no command to run")
+
+ self._logger.info("FIRE PROC: '%s'" % appname)
+
+        if (self.processExist(appname) is not None):
+ self._logger.warning("process %s appears to be running already\n" % appname)
+ if (failIfRunning):
+ raise DMError("Automation Error: Process is already running")
+
+ self._runCmds([{'cmd': 'exec ' + appname}])
+
+ # The 'exec' command may wait for the process to start and end, so checking
+ # for the process here may result in process = None.
+ # The normal case is to launch the process and return right away
+ # There is one case with robotium (am instrument) where exec returns at the end
+ pid = None
+ waited = 0
+ while pid is None and waited < maxWaitTime:
+ pid = self.processExist(appname)
+ if pid:
+ break
+ time.sleep(1)
+ waited += 1
+
+ self._logger.debug("got pid: %s for process: %s" % (pid, appname))
+ return pid
+
+ def launchProcess(self, cmd, outputFile="process.txt", cwd='', env='', failIfRunning=False):
+ """
+ Launches a process, redirecting output to standard out
+
+ Returns output filename
+
+        WARNING: Does not work as you might expect on Android! The application's
+        own output will be flushed elsewhere.
+
+ DEPRECATED: Use shell() or launchApplication() for new code
+ """
+ if not cmd:
+ self._logger.warning("launchProcess called without command to run")
+ return None
+
+ if cmd[0] == 'am' and hasattr(self, '_getExtraAmStartArgs'):
+ cmd = cmd[:2] + self._getExtraAmStartArgs() + cmd[2:]
+
+ cmdline = subprocess.list2cmdline(cmd)
+ if outputFile == "process.txt" or outputFile is None:
+            outputFile = "%s/process.txt" % self.deviceRoot
+ cmdline += " > " + outputFile
+
+ # Prepend our env to the command
+ cmdline = '%s %s' % (self._formatEnvString(env), cmdline)
+
+ # fireProcess may trigger an exception, but we won't handle it
+ if cmd[0] == "am":
+ # Robocop tests spawn "am instrument". sutAgent's exec ensures that
+ # am has started before returning, so there is no point in having
+ # fireProcess wait for it to start. Also, since "am" does not show
+ # up in the process list while the test is running, waiting for it
+ # in fireProcess is difficult.
+ self.fireProcess(cmdline, failIfRunning, 0)
+ else:
+ self.fireProcess(cmdline, failIfRunning)
+ return outputFile
+
+ def killProcess(self, appname, sig=None):
+ if sig:
+ pid = self.processExist(appname)
+ if pid and pid > 0:
+ try:
+ self.shellCheckOutput(['kill', '-%d' % sig, str(pid)],
+ root=True)
+ except DMError as err:
+ self._logger.warning("unable to kill -%d %s (pid %s)" %
+ (sig, appname, str(pid)))
+ self._logger.debug(err)
+ raise err
+ else:
+ self._logger.warning("unable to kill -%d %s -- not running?" %
+ (sig, appname))
+ else:
+ retries = 0
+ while retries < self.retryLimit:
+ try:
+ if self.processExist(appname):
+ self._runCmds([{'cmd': 'kill ' + appname}])
+ return
+ except DMError as err:
+ retries += 1
+ self._logger.warning("try %d of %d failed to kill %s" %
+ (retries, self.retryLimit, appname))
+ self._logger.debug(err)
+ if retries >= self.retryLimit:
+ raise err
+
+ def getTempDir(self):
+ return self._runCmds([{'cmd': 'tmpd'}]).strip()
+
+ def pullFile(self, remoteFile, offset=None, length=None):
+ # The "pull" command is different from other commands in that DeviceManager
+ # has to read a certain number of bytes instead of just reading to the
+ # next prompt. This is more robust than the "cat" command, which will be
+ # confused if the prompt string exists within the file being catted.
+ # However it means we can't use the response-handling logic in sendCMD().
+
+ def err(error_msg):
+ err_str = 'DeviceManager: pull unsuccessful: %s' % error_msg
+ self._logger.error(err_str)
+ self._sock = None
+ raise DMError(err_str)
+
+ # FIXME: We could possibly move these socket-reading functions up to
+ # the class level if we wanted to refactor sendCMD(). For now they are
+ # only used to pull files.
+
+ def uread(to_recv, error_msg):
+ """ unbuffered read """
+ try:
+ data = ""
+ if select.select([self._sock], [], [], self.default_timeout)[0]:
+ data = self._sock.recv(to_recv)
+ if not data:
+ # timed out waiting for response or error response
+ err(error_msg)
+
+ return data
+ except:
+ err(error_msg)
+
+ def read_until_char(c, buf, error_msg):
+ """ read until 'c' is found; buffer rest """
+ while c not in buf:
+ data = uread(1024, error_msg)
+ buf += data
+ return buf.partition(c)
+
+ def read_exact(total_to_recv, buf, error_msg):
+ """ read exact number of 'total_to_recv' bytes """
+ while len(buf) < total_to_recv:
+ to_recv = min(total_to_recv - len(buf), 1024)
+ data = uread(to_recv, error_msg)
+ buf += data
+ return buf
+
+ prompt = self._base_prompt + self._prompt_sep
+ buf = ''
+
+ # expected return value:
+ # <filename>,<filesize>\n<filedata>
+ # or, if error,
+ # <filename>,-1\n<error message>
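+        # e.g. (illustrative) a successful pull of a 5-byte file could come back as:
+        #   /mnt/sdcard/tests/foo.txt,5\nhello<prompt>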
+
+ # just send the command first, we read the response inline below
+ if offset is not None and length is not None:
+ cmd = 'pull %s %d %d' % (remoteFile, offset, length)
+ elif offset is not None:
+ cmd = 'pull %s %d' % (remoteFile, offset)
+ else:
+ cmd = 'pull %s' % remoteFile
+
+ self._runCmds([{'cmd': cmd}])
+
+ # read metadata; buffer the rest
+ metadata, sep, buf = read_until_char('\n', buf, 'could not find metadata')
+ if not metadata:
+ return None
+ self._logger.debug('metadata: %s' % metadata)
+
+ filename, sep, filesizestr = metadata.partition(',')
+ if sep == '':
+ err('could not find file size in returned metadata')
+ try:
+ filesize = int(filesizestr)
+ except ValueError:
+ err('invalid file size in returned metadata')
+
+ if filesize == -1:
+ # read error message
+ error_str, sep, buf = read_until_char('\n', buf, 'could not find error message')
+ if not error_str:
+ err("blank error message")
+ # prompt should follow
+ read_exact(len(prompt), buf, 'could not find prompt')
+ # failures are expected, so don't use "Remote Device Error" or we'll RETRY
+ raise DMError("DeviceManager: pulling file '%s' unsuccessful: %s" %
+ (remoteFile, error_str))
+
+ # read file data
+ total_to_recv = filesize + len(prompt)
+ buf = read_exact(total_to_recv, buf, 'could not get all file data')
+ if buf[-len(prompt):] != prompt:
+ err('no prompt found after file data--DeviceManager may be out of sync with agent')
+ return buf
+ return buf[:-len(prompt)]
+
+ def getFile(self, remoteFile, localFile):
+ data = self.pullFile(remoteFile)
+
+ fhandle = open(localFile, 'wb')
+ fhandle.write(data)
+ fhandle.close()
+ if not self.validateFile(remoteFile, localFile):
+ raise DMError("Automation Error: Failed to validate file when downloading %s" %
+ remoteFile)
+
+ def getDirectory(self, remoteDir, localDir, checkDir=True):
+ self._logger.info("getting files in '%s'" % remoteDir)
+ if checkDir and not self.dirExists(remoteDir):
+ raise DMError("Automation Error: Error getting directory: %s not a directory" %
+ remoteDir)
+
+ filelist = self.listFiles(remoteDir)
+ self._logger.debug(filelist)
+ if not os.path.exists(localDir):
+ os.makedirs(localDir)
+
+ for f in filelist:
+ if f == '.' or f == '..':
+ continue
+ remotePath = remoteDir + '/' + f
+ localPath = os.path.join(localDir, f)
+ if self.dirExists(remotePath):
+ self.getDirectory(remotePath, localPath, False)
+ else:
+ self.getFile(remotePath, localPath)
+
+ def validateFile(self, remoteFile, localFile):
+ remoteHash = self._getRemoteHash(remoteFile)
+ localHash = self._getLocalHash(localFile)
+
+ if (remoteHash is None):
+ return False
+
+ if (remoteHash == localHash):
+ return True
+
+ return False
+
+ def _getRemoteHash(self, filename):
+ data = self._runCmds([{'cmd': 'hash ' + filename}]).strip()
+ self._logger.debug("remote hash returned: '%s'" % data)
+ return data
+
+ def unpackFile(self, filePath, destDir=None):
+ """
+ Unzips a bundle to a location on the device
+
+ If destDir is not specified, the bundle is extracted in the same directory
+ """
+ # if no destDir is passed in just set it to filePath's folder
+ if not destDir:
+ destDir = posixpath.dirname(filePath)
+
+ if destDir[-1] != '/':
+ destDir += '/'
+
+ self._runCmds([{'cmd': 'unzp %s %s' % (filePath, destDir)}])
+
+ def _getRebootServerSocket(self, ipAddr):
+ serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ serverSocket.settimeout(60.0)
+ serverSocket.bind((ipAddr, 0))
+ serverSocket.listen(1)
+ self._logger.debug('Created reboot callback server at %s:%d' %
+ serverSocket.getsockname())
+ return serverSocket
+
+ def _waitForRebootPing(self, serverSocket):
+ conn = None
+ data = None
+ startTime = datetime.datetime.now()
+ waitTime = datetime.timedelta(seconds=self.reboot_timeout)
+ while not data and datetime.datetime.now() - startTime < waitTime:
+ self._logger.info("Waiting for reboot callback ping from device...")
+ try:
+ if not conn:
+ conn, _ = serverSocket.accept()
+ # Receiving any data is good enough.
+ data = conn.recv(1024)
+ if data:
+ self._logger.info("Received reboot callback ping from device!")
+ conn.sendall('OK')
+ conn.close()
+ except socket.timeout:
+ pass
+ except socket.error as e:
+ if e.errno != errno.EAGAIN and e.errno != errno.EWOULDBLOCK:
+ raise
+
+ if not data:
+ raise DMError('Timed out waiting for reboot callback.')
+
+ self._logger.info("Sleeping for %s seconds to wait for device "
+ "to 'settle'" % self.reboot_settling_time)
+ time.sleep(self.reboot_settling_time)
+
+ def reboot(self, ipAddr=None, port=30000, wait=False):
+ # port ^^^ is here for backwards compatibility only, we now
+ # determine a port automatically and safely
+ wait = (wait or ipAddr)
+
+ cmd = 'rebt'
+
+ self._logger.info("Rebooting device")
+
+ # if we're waiting, create a listening server and pass information on
+ # it to the device before rebooting (we do this instead of just polling
+ # to make sure the device actually rebooted -- yes, there are probably
+ # simpler ways of doing this like polling uptime, but this is what we're
+ # doing for now)
+ if wait:
+ if not ipAddr:
+ ipAddr = moznetwork.get_ip()
+ serverSocket = self._getRebootServerSocket(ipAddr)
+ # The update.info command tells the SUTAgent to send a TCP message
+ # after restarting.
+ destname = '/data/data/com.mozilla.SUTAgentAndroid/files/update.info'
+ data = "%s,%s\rrebooting\r" % serverSocket.getsockname()
+ self._runCmds([{'cmd': 'push %s %s' % (destname, len(data)),
+ 'data': data}])
+ cmd += " %s %s" % serverSocket.getsockname()
+
+ # actually reboot device
+ self._runCmds([{'cmd': cmd}])
+ # if we're waiting, wait for a callback ping from the agent before
+ # continuing (and throw an exception if we don't receive said ping)
+ if wait:
+ self._waitForRebootPing(serverSocket)
+
+ def getInfo(self, directive=None):
+ data = None
+ result = {}
+ collapseSpaces = re.compile(' +')
+
+ directives = ['os', 'id', 'uptime', 'uptimemillis', 'systime', 'screen',
+ 'rotation', 'memory', 'process', 'disk', 'power', 'sutuserinfo',
+ 'temperature']
+ if (directive in directives):
+ directives = [directive]
+
+ for d in directives:
+ data = self._runCmds([{'cmd': 'info ' + d}])
+
+ data = collapseSpaces.sub(' ', data)
+ result[d] = data.split('\n')
+
+ # Get rid of any 0 length members of the arrays
+ for k, v in result.iteritems():
+ result[k] = filter(lambda x: x != '', result[k])
+
+ # Format the process output
+ if 'process' in result:
+ proclist = []
+ for l in result['process']:
+ if l:
+ proclist.append(l.split('\t'))
+ result['process'] = proclist
+
+ self._logger.debug("results: %s" % result)
+ return result
+
+ def installApp(self, appBundlePath, destPath=None):
+ cmd = 'inst ' + appBundlePath
+ if destPath:
+ cmd += ' ' + destPath
+
+ data = self._runCmds([{'cmd': cmd}])
+
+ if 'installation complete [0]' not in data:
+            raise DMError("Remote Device Error: Error installing app. Error message: %s" % data)
+
+ def uninstallApp(self, appName, installPath=None):
+ cmd = 'uninstall ' + appName
+ if installPath:
+ cmd += ' ' + installPath
+ data = self._runCmds([{'cmd': cmd}])
+
+ status = data.split('\n')[0].strip()
+ self._logger.debug("uninstallApp: '%s'" % status)
+ if status == 'Success':
+ return
+ raise DMError("Remote Device Error: uninstall failed for %s" % appName)
+
+ def uninstallAppAndReboot(self, appName, installPath=None):
+ cmd = 'uninst ' + appName
+ if installPath:
+ cmd += ' ' + installPath
+ data = self._runCmds([{'cmd': cmd}])
+
+ self._logger.debug("uninstallAppAndReboot: %s" % data)
+ return
+
+ def updateApp(self, appBundlePath, processName=None, destPath=None,
+ ipAddr=None, port=30000, wait=False):
+ # port ^^^ is here for backwards compatibility only, we now
+ # determine a port automatically and safely
+ wait = (wait or ipAddr)
+
+ cmd = 'updt '
+ if processName is None:
+ # Then we pass '' for processName
+ cmd += "'' " + appBundlePath
+ else:
+ cmd += processName + ' ' + appBundlePath
+
+ if destPath:
+ cmd += " " + destPath
+
+ if wait:
+ if not ipAddr:
+ ipAddr = moznetwork.get_ip()
+ serverSocket = self._getRebootServerSocket(ipAddr)
+ cmd += " %s %s" % serverSocket.getsockname()
+
+        self._logger.debug("updateApp using command: %s" % cmd)
+
+ self._runCmds([{'cmd': cmd}])
+
+ if wait:
+ self._waitForRebootPing(serverSocket)
+
+ def getCurrentTime(self):
+ return int(self._runCmds([{'cmd': 'clok'}]).strip())
+
+ def _formatEnvString(self, env):
+ """
+ Returns a properly formatted env string for the agent.
+
+ Input - env, which is either None, '', or a dict
+ Output - a quoted string of the form: '"envvar1=val1,envvar2=val2..."'
+ If env is None or '' return '' (empty quoted string)
+ """
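+        # Illustrative transformation (hypothetical values; ordering follows the
+        # dict's iteration order):
+        #   {'MOZ_CRASHREPORTER': '1', 'DISPLAY': ':0'} -> '"MOZ_CRASHREPORTER=1,DISPLAY=:0"'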
+ if (env is None or env == ''):
+ return ''
+
+ retVal = '"%s"' % ','.join(map(lambda x: '%s=%s' % (x[0], x[1]), env.iteritems()))
+ if (retVal == '""'):
+ return ''
+
+ return retVal
+
+ def adjustResolution(self, width=1680, height=1050, type='hdmi'):
+ """
+ Adjust the screen resolution on the device, REBOOT REQUIRED
+
+ NOTE: this only works on a tegra ATM
+
+ supported resolutions: 640x480, 800x600, 1024x768, 1152x864, 1200x1024, 1440x900,
+ 1680x1050, 1920x1080
+ """
+ if self.getInfo('os')['os'][0].split()[0] != 'harmony-eng':
+ self._logger.warning("unable to adjust screen resolution on non Tegra device")
+ return False
+
+ results = self.getInfo('screen')
+ parts = results['screen'][0].split(':')
+ self._logger.debug("we have a current resolution of %s, %s" %
+ (parts[1].split()[0], parts[2].split()[0]))
+
+ # verify screen type is valid, and set it to the proper value
+ # (https://bugzilla.mozilla.org/show_bug.cgi?id=632895#c4)
+ screentype = -1
+ if (type == 'hdmi'):
+ screentype = 5
+ elif (type == 'vga' or type == 'crt'):
+ screentype = 3
+ else:
+ return False
+
+ # verify we have numbers
+ if not (isinstance(width, int) and isinstance(height, int)):
+ return False
+
+ if (width < 100 or width > 9999):
+ return False
+
+ if (height < 100 or height > 9999):
+ return False
+
+ self._logger.debug("adjusting screen resolution to %s, %s and rebooting" % (width, height))
+
+ self._runCmds(
+ [{'cmd': "exec setprop persist.tegra.dpy%s.mode.width %s" % (screentype, width)}])
+ self._runCmds(
+ [{'cmd': "exec setprop persist.tegra.dpy%s.mode.height %s" % (screentype, height)}])
+
+ def chmodDir(self, remoteDir, **kwargs):
+ self._runCmds([{'cmd': "chmod " + remoteDir}])
diff --git a/testing/mozbase/mozdevice/mozdevice/dmcli.py b/testing/mozbase/mozdevice/mozdevice/dmcli.py
new file mode 100644
index 000000000..7ba65e842
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/dmcli.py
@@ -0,0 +1,382 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Command-line client to control a device
+"""
+
+import errno
+import logging
+import os
+import posixpath
+import StringIO
+import sys
+import mozdevice
+import mozlog
+import argparse
+
+
+class DMCli(object):
+
+ def __init__(self):
+ self.commands = {'deviceroot': {'function': self.deviceroot,
+ 'help': 'get device root directory for storing temporary '
+ 'files'},
+ 'install': {'function': self.install,
+ 'args': [{'name': 'file'}],
+ 'help': 'push this package file to the device'
+ ' and install it'},
+ 'uninstall': {'function': self.uninstall,
+ 'args': [{'name': 'packagename'}],
+ 'help': 'uninstall the named app from the device'},
+ 'killapp': {'function': self.kill,
+ 'args': [{'name': 'process_name', 'nargs': '*'}],
+ 'help': 'kills any processes with name(s) on device'},
+ 'launchapp': {'function': self.launchapp,
+ 'args': [{'name': 'appname'},
+ {'name': 'activity_name'},
+ {'name': '--intent',
+ 'action': 'store',
+ 'default': 'android.intent.action.VIEW'},
+ {'name': '--url',
+ 'action': 'store'},
+ {'name': '--no-fail-if-running',
+ 'action': 'store_true',
+ 'help': 'Don\'t fail if application is'
+ ' already running'}
+ ],
+ 'help': 'launches application on device'},
+ 'listapps': {'function': self.listapps,
+ 'help': 'list applications on device'},
+ 'push': {'function': self.push,
+ 'args': [{'name': 'local_file'},
+ {'name': 'remote_file'}
+ ],
+ 'help': 'copy file/dir to device'},
+ 'pull': {'function': self.pull,
+ 'args': [{'name': 'local_file'},
+ {'name': 'remote_file', 'nargs': '?'}],
+ 'help': 'copy file/dir from device'},
+ 'shell': {'function': self.shell,
+ 'args': [{'name': 'command', 'nargs': argparse.REMAINDER},
+ {'name': '--root', 'action': 'store_true',
+ 'help': 'Run command as root'}],
+ 'help': 'run shell command on device'},
+ 'info': {'function': self.getinfo,
+ 'args': [{'name': 'directive', 'nargs': '?'}],
+ 'help': 'get information on specified '
+ 'aspect of the device (if no argument '
+ 'given, print all available information)'
+ },
+ 'ps': {'function': self.processlist,
+ 'help': 'get information on running processes on device'
+ },
+ 'logcat': {'function': self.logcat,
+ 'help': 'get logcat from device'
+ },
+ 'ls': {'function': self.listfiles,
+ 'args': [{'name': 'remote_dir'}],
+ 'help': 'list files on device'
+ },
+ 'rm': {'function': self.removefile,
+ 'args': [{'name': 'remote_file'}],
+ 'help': 'remove file from device'
+ },
+ 'isdir': {'function': self.isdir,
+ 'args': [{'name': 'remote_dir'}],
+ 'help': 'print if remote file is a directory'
+ },
+ 'mkdir': {'function': self.mkdir,
+ 'args': [{'name': 'remote_dir'}],
+ 'help': 'makes a directory on device'
+ },
+ 'rmdir': {'function': self.rmdir,
+ 'args': [{'name': 'remote_dir'}],
+ 'help': 'recursively remove directory from device'
+ },
+ 'screencap': {'function': self.screencap,
+ 'args': [{'name': 'png_file'}],
+ 'help': 'capture screenshot of device in action'
+ },
+ 'sutver': {'function': self.sutver,
+ 'help': 'SUTAgent\'s product name and version (SUT only)'
+ },
+ 'clearlogcat': {'function': self.clearlogcat,
+ 'help': 'clear the logcat'
+ },
+ 'reboot': {'function': self.reboot,
+ 'help': 'reboot the device',
+ 'args': [{'name': '--wait',
+ 'action': 'store_true',
+ 'help': 'Wait for device to come back up'
+ ' before exiting'}]
+
+ },
+ 'isfile': {'function': self.isfile,
+ 'args': [{'name': 'remote_file'}],
+ 'help': 'check whether a file exists on the device'
+ },
+ 'launchfennec': {'function': self.launchfennec,
+ 'args': [{'name': 'appname'},
+ {'name': '--intent', 'action': 'store',
+ 'default': 'android.intent.action.VIEW'},
+ {'name': '--url', 'action': 'store'},
+ {'name': '--extra-args', 'action': 'store'},
+ {'name': '--mozenv', 'action': 'store',
+ 'help': 'Gecko environment variables to set'
+ ' in "KEY1=VAL1 KEY2=VAL2" format'},
+ {'name': '--no-fail-if-running',
+ 'action': 'store_true',
+ 'help': 'Don\'t fail if application is '
+ 'already running'}
+ ],
+ 'help': 'launch fennec'
+ },
+ 'getip': {'function': self.getip,
+ 'args': [{'name': 'interface', 'nargs': '*'}],
+ 'help': 'get the ip address of the device'
+ }
+ }
+
+ self.parser = argparse.ArgumentParser()
+ self.add_options(self.parser)
+ self.add_commands(self.parser)
+ mozlog.commandline.add_logging_group(self.parser)
+
+ def run(self, args=sys.argv[1:]):
+        args = self.parser.parse_args(args)
+
+ mozlog.commandline.setup_logging(
+ 'mozdevice', args, {'mach': sys.stdout})
+
+ if args.dmtype == "sut" and not args.host and not args.hwid:
+ self.parser.error("Must specify device ip in TEST_DEVICE or "
+ "with --host option with SUT")
+
+        self.dm = self.getDevice(dmtype=args.dmtype, hwid=args.hwid,
+                                 host=args.host, port=args.port,
+                                 packagename=args.package_name,
+                                 verbose=args.verbose)
+
+ ret = args.func(args)
+ if ret is None:
+ ret = 0
+
+ sys.exit(ret)
+
+ def add_options(self, parser):
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="Verbose output from DeviceManager",
+ default=bool(os.environ.get('VERBOSE')))
+ parser.add_argument("--host", action="store",
+ help="Device hostname (only if using TCP/IP, "
+ "defaults to TEST_DEVICE environment "
+ "variable if present)",
+ default=os.environ.get('TEST_DEVICE'))
+ parser.add_argument("-p", "--port", action="store",
+ type=int,
+ help="Custom device port (if using SUTAgent or "
+ "adb-over-tcp)", default=None)
+ parser.add_argument("-m", "--dmtype", action="store",
+ help="DeviceManager type (adb or sut, defaults "
+ "to DM_TRANS environment variable, if "
+ "present, or adb)",
+ default=os.environ.get('DM_TRANS', 'adb'))
+ parser.add_argument("-d", "--hwid", action="store",
+ help="HWID", default=None)
+ parser.add_argument("--package-name", action="store",
+ help="Packagename (if using DeviceManagerADB)",
+ default=None)
+
+ def add_commands(self, parser):
+ subparsers = parser.add_subparsers(title="Commands", metavar="<command>")
+ for (commandname, commandprops) in sorted(self.commands.iteritems()):
+ subparser = subparsers.add_parser(commandname, help=commandprops['help'])
+ if commandprops.get('args'):
+ for arg in commandprops['args']:
+ # this is more elegant but doesn't work in python 2.6
+ # (which we still use on tbpl @ mozilla where we install
+ # this package)
+ # kwargs = { k: v for k,v in arg.items() if k is not 'name' }
+ kwargs = {}
+ for (k, v) in arg.items():
+                        if k != 'name':
+ kwargs[k] = v
+ subparser.add_argument(arg['name'], **kwargs)
+ subparser.set_defaults(func=commandprops['function'])
+
+ def getDevice(self, dmtype="adb", hwid=None, host=None, port=None,
+ packagename=None, verbose=False):
+ '''
+ Returns a device with the specified parameters
+ '''
+ logLevel = logging.ERROR
+ if verbose:
+ logLevel = logging.DEBUG
+
+ if hwid:
+ return mozdevice.DroidConnectByHWID(hwid, logLevel=logLevel)
+
+ if dmtype == "adb":
+ if host and not port:
+ port = 5555
+ return mozdevice.DroidADB(packageName=packagename,
+ host=host, port=port,
+ logLevel=logLevel)
+ elif dmtype == "sut":
+ if not host:
+ self.parser.error("Must specify host with SUT!")
+ if not port:
+ port = 20701
+ return mozdevice.DroidSUT(host=host, port=port,
+ logLevel=logLevel)
+ else:
+            self.parser.error("Unknown device manager type: %s" % dmtype)
+
+ def deviceroot(self, args):
+ print self.dm.deviceRoot
+
+ def push(self, args):
+ (src, dest) = (args.local_file, args.remote_file)
+ if os.path.isdir(src):
+ self.dm.pushDir(src, dest)
+ else:
+ dest_is_dir = dest[-1] == '/' or self.dm.dirExists(dest)
+ dest = posixpath.normpath(dest)
+ if dest_is_dir:
+ dest = posixpath.join(dest, os.path.basename(src))
+ self.dm.pushFile(src, dest)
+
+ def pull(self, args):
+ (src, dest) = (args.local_file, args.remote_file)
+ if not self.dm.fileExists(src):
+ print 'No such file or directory'
+ return
+ if not dest:
+ dest = posixpath.basename(src)
+ if self.dm.dirExists(src):
+ self.dm.getDirectory(src, dest)
+ else:
+ self.dm.getFile(src, dest)
+
+ def install(self, args):
+ basename = os.path.basename(args.file)
+ app_path_on_device = posixpath.join(self.dm.deviceRoot,
+ basename)
+ self.dm.pushFile(args.file, app_path_on_device)
+ self.dm.installApp(app_path_on_device)
+
+ def uninstall(self, args):
+ self.dm.uninstallApp(args.packagename)
+
+ def launchapp(self, args):
+ self.dm.launchApplication(args.appname, args.activity_name,
+ args.intent, url=args.url,
+ failIfRunning=(not args.no_fail_if_running))
+
+ def listapps(self, args):
+ for app in self.dm.getInstalledApps():
+ print app
+
+ def stopapp(self, args):
+ self.dm.stopApplication(args.appname)
+
+ def kill(self, args):
+ for name in args.process_name:
+ self.dm.killProcess(name)
+
+ def shell(self, args):
+ buf = StringIO.StringIO()
+ self.dm.shell(args.command, buf, root=args.root)
+ print str(buf.getvalue()[0:-1]).rstrip()
+
+ def getinfo(self, args):
+ info = self.dm.getInfo(directive=args.directive)
+ for (infokey, infoitem) in sorted(info.iteritems()):
+ if infokey == "process":
+ pass # skip process list: get that through ps
+ elif args.directive is None:
+ print "%s: %s" % (infokey.upper(), infoitem)
+ else:
+ print infoitem
+
+ def logcat(self, args):
+ print ''.join(self.dm.getLogcat())
+
+ def clearlogcat(self, args):
+ self.dm.recordLogcat()
+
+ def reboot(self, args):
+ self.dm.reboot(wait=args.wait)
+
+ def processlist(self, args):
+ pslist = self.dm.getProcessList()
+ for ps in pslist:
+ print " ".join(str(i) for i in ps)
+
+ def listfiles(self, args):
+ filelist = self.dm.listFiles(args.remote_dir)
+ for file in filelist:
+ print file
+
+ def removefile(self, args):
+ self.dm.removeFile(args.remote_file)
+
+ def isdir(self, args):
+ if self.dm.dirExists(args.remote_dir):
+ print "TRUE"
+ return
+
+ print "FALSE"
+ return errno.ENOTDIR
+
+ def mkdir(self, args):
+ self.dm.mkDir(args.remote_dir)
+
+ def rmdir(self, args):
+ self.dm.removeDir(args.remote_dir)
+
+ def screencap(self, args):
+ self.dm.saveScreenshot(args.png_file)
+
+ def sutver(self, args):
+ if args.dmtype == 'sut':
+ print '%s Version %s' % (self.dm.agentProductName,
+ self.dm.agentVersion)
+ else:
+ print 'Must use SUT transport to get SUT version.'
+
+ def isfile(self, args):
+ if self.dm.fileExists(args.remote_file):
+ print "TRUE"
+ return
+ print "FALSE"
+ return errno.ENOENT
+
+ def launchfennec(self, args):
+ mozEnv = None
+ if args.mozenv:
+ mozEnv = {}
+ keyvals = args.mozenv.split()
+ for keyval in keyvals:
+ (key, _, val) = keyval.partition("=")
+ mozEnv[key] = val
+ self.dm.launchFennec(args.appname, intent=args.intent,
+ mozEnv=mozEnv,
+ extraArgs=args.extra_args, url=args.url,
+ failIfRunning=(not args.no_fail_if_running))
+
+ def getip(self, args):
+ if args.interface:
+ print(self.dm.getIP(args.interface))
+ else:
+ print(self.dm.getIP())
+
+
+def cli(args=sys.argv[1:]):
+ # process the command line
+ cli = DMCli()
+ cli.run(args)
+
+if __name__ == '__main__':
+ cli()
diff --git a/testing/mozbase/mozdevice/mozdevice/droid.py b/testing/mozbase/mozdevice/mozdevice/droid.py
new file mode 100644
index 000000000..f06a619c4
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/droid.py
@@ -0,0 +1,263 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import StringIO
+import moznetwork
+import re
+import threading
+import time
+
+import version_codes
+
+from Zeroconf import Zeroconf, ServiceBrowser
+from devicemanager import ZeroconfListener
+from devicemanagerADB import DeviceManagerADB
+from devicemanagerSUT import DeviceManagerSUT
+from devicemanager import DMError
+
+
+class DroidMixin(object):
+ """Mixin to extend DeviceManager with Android-specific functionality"""
+
+ _stopApplicationNeedsRoot = True
+
+ def _getExtraAmStartArgs(self):
+ return []
+
+ def launchApplication(self, appName, activityName, intent, url=None,
+ extras=None, wait=True, failIfRunning=True):
+ """
+ Launches an Android application
+
+ :param appName: Name of application (e.g. `com.android.chrome`)
+ :param activityName: Name of activity to launch (e.g. `.Main`)
+ :param intent: Intent to launch application with
+ :param url: URL to open
+ :param extras: Dictionary of extra arguments to launch application with
+ :param wait: If True, wait for application to start before returning
+ :param failIfRunning: Raise an exception if instance of application is already running
+ """
+
+ # If failIfRunning is True, we throw an exception here. Only one
+ # instance of an application can be running at once on Android,
+ # starting a new instance may not be what we want depending on what
+ # we want to do
+ if failIfRunning and self.processExist(appName):
+ raise DMError("Only one instance of an application may be running "
+ "at once")
+
+ acmd = ["am", "start"] + self._getExtraAmStartArgs() + \
+ ["-W" if wait else '', "-n", "%s/%s" % (appName, activityName)]
+
+ if intent:
+ acmd.extend(["-a", intent])
+
+ if extras:
+ for (key, val) in extras.iteritems():
+ if type(val) is int:
+ extraTypeParam = "--ei"
+ elif type(val) is bool:
+ extraTypeParam = "--ez"
+ else:
+ extraTypeParam = "--es"
+ acmd.extend([extraTypeParam, str(key), str(val)])
+
+ if url:
+ acmd.extend(["-d", url])
+
+ # shell output not that interesting and debugging logs should already
+ # show what's going on here... so just create an empty memory buffer
+ # and ignore (except on error)
+ shellOutput = StringIO.StringIO()
+ if self.shell(acmd, shellOutput) == 0:
+ return
+
+ shellOutput.seek(0)
+ raise DMError("Unable to launch application (shell output: '%s')" % shellOutput.read())
+
+ def launchFennec(self, appName, intent="android.intent.action.VIEW",
+ mozEnv=None, extraArgs=None, url=None, wait=True,
+ failIfRunning=True):
+ """
+ Convenience method to launch Fennec on Android with various debugging
+ arguments
+
+ :param appName: Name of fennec application (e.g. `org.mozilla.fennec`)
+ :param intent: Intent to launch application with
+ :param mozEnv: Mozilla specific environment to pass into application
+ :param extraArgs: Extra arguments to be parsed by fennec
+ :param url: URL to open
+ :param wait: If True, wait for application to start before returning
+ :param failIfRunning: Raise an exception if instance of application is already running
+ """
+ extras = {}
+
+ if mozEnv:
+ # mozEnv is expected to be a dictionary of environment variables: Fennec
+ # itself will set them when launched
+ for (envCnt, (envkey, envval)) in enumerate(mozEnv.iteritems()):
+ extras["env" + str(envCnt)] = envkey + "=" + envval
+
+ # Additional command line arguments that fennec will read and use (e.g.
+ # with a custom profile)
+ if extraArgs:
+ extras['args'] = " ".join(extraArgs)
+
+ self.launchApplication(appName, "org.mozilla.gecko.BrowserApp", intent, url=url,
+ extras=extras,
+ wait=wait, failIfRunning=failIfRunning)
+
+ def getInstalledApps(self):
+ """
+ Lists applications installed on this Android device
+
+ Returns a list of application names in the form [ 'org.mozilla.fennec', ... ]
+ """
+ output = self.shellCheckOutput(["pm", "list", "packages", "-f"])
+ apps = []
+ for line in output.splitlines():
+ # lines are of form: package:/system/app/qik-tmo.apk=com.qiktmobile.android
+ apps.append(line.split('=')[1])
+
+ return apps
+
+ def stopApplication(self, appName):
+ """
+ Stops the specified application
+
+ For Android 3.0+, we use the "am force-stop" to do this, which is
+ reliable and does not require root. For earlier versions of Android,
+ we simply try to manually kill the processes started by the app
+ repeatedly until none is around any more. This is less reliable and
+ does require root.
+
+ :param appName: Name of application (e.g. `com.android.chrome`)
+ """
+ version = self.shellCheckOutput(["getprop", "ro.build.version.sdk"])
+ if int(version) >= version_codes.HONEYCOMB:
+ self.shellCheckOutput(["am", "force-stop", appName],
+ root=self._stopApplicationNeedsRoot)
+ else:
+ num_tries = 0
+ max_tries = 5
+ while self.processExist(appName):
+ if num_tries > max_tries:
+ raise DMError("Couldn't successfully kill %s after %s "
+ "tries" % (appName, max_tries))
+ self.killProcess(appName)
+ num_tries += 1
+
+ # sleep for a short duration to make sure there are no
+ # additional processes in the process of being launched
+ # (this is not 100% guaranteed to work since it is inherently
+                # racy, but it's the best we can do)
+ time.sleep(1)
+
+
+class DroidADB(DeviceManagerADB, DroidMixin):
+
+ _stopApplicationNeedsRoot = False
+
+ def getTopActivity(self):
+ package = None
+ data = None
+ try:
+ data = self.shellCheckOutput(
+ ["dumpsys", "window", "windows"], timeout=self.short_timeout)
+ except:
+ # dumpsys seems to intermittently fail (seen on 4.3 emulator), producing
+ # no output.
+ return ""
+ # "dumpsys window windows" produces many lines of input. The top/foreground
+ # activity is indicated by something like:
+ # mFocusedApp=AppWindowToken{483e6db0 token=HistoryRecord{484dcad8 com.mozilla.SUTAgentAndroid/.SUTAgentAndroid}} # noqa
+ # or, on other devices:
+ # FocusedApplication: name='AppWindowToken{41a65340 token=ActivityRecord{418fbd68 org.mozilla.fennec_mozdev/org.mozilla.gecko.BrowserApp}}', dispatchingTimeout=5000.000ms # noqa
+ # Extract this line, ending in the forward slash:
+ m = re.search('mFocusedApp(.+)/', data)
+ if not m:
+ m = re.search('FocusedApplication(.+)/', data)
+ if m:
+ line = m.group(0)
+ # Extract package name: string of non-whitespace ending in forward slash
+ m = re.search('(\S+)/$', line)
+ if m:
+ package = m.group(1)
+ if not package:
+ # On some Android 4.4 devices, when the home screen is displayed,
+ # dumpsys reports "mFocusedApp=null". Guard against this case and
+ # others where the focused app can not be determined by returning
+ # an empty string -- same as sutagent.
+ package = ""
+ return package
+
+ def getAppRoot(self, packageName):
+ """
+ Returns the root directory for the specified android application
+ """
+ # relying on convention
+ return '/data/data/%s' % packageName
+
+
+class DroidSUT(DeviceManagerSUT, DroidMixin):
+
+ def _getExtraAmStartArgs(self):
+ # in versions of android in jellybean and beyond, the agent may run as
+ # a different process than the one that started the app. In this case,
+ # we need to get back the original user serial number and then pass
+ # that to the 'am start' command line
+ if not hasattr(self, '_userSerial'):
+ infoDict = self.getInfo(directive="sutuserinfo")
+ if infoDict.get('sutuserinfo') and \
+ len(infoDict['sutuserinfo']) > 0:
+ userSerialString = infoDict['sutuserinfo'][0]
+ # user serial always an integer, see:
+ # http://developer.android.com/reference/android/os/UserManager.html#getSerialNumberForUser%28android.os.UserHandle%29
+ m = re.match('User Serial:([0-9]+)', userSerialString)
+ if m:
+ self._userSerial = m.group(1)
+ else:
+ self._userSerial = None
+ else:
+ self._userSerial = None
+
+ if self._userSerial is not None:
+ return ["--user", self._userSerial]
+
+ return []
+
+ def getTopActivity(self):
+ return self._runCmds([{'cmd': "activity"}]).strip()
+
+ def getAppRoot(self, packageName):
+ return self._runCmds([{'cmd': 'getapproot %s' % packageName}]).strip()
+
+
+def DroidConnectByHWID(hwid, timeout=30, **kwargs):
+ """Try to connect to the given device by waiting for it to show up using
+ mDNS with the given timeout."""
+ zc = Zeroconf(moznetwork.get_ip())
+
+ evt = threading.Event()
+ listener = ZeroconfListener(hwid, evt)
+ sb = ServiceBrowser(zc, "_sutagent._tcp.local.", listener)
+ foundIP = None
+ if evt.wait(timeout):
+ # we found the hwid
+ foundIP = listener.ip
+ sb.cancel()
+ zc.close()
+
+    if foundIP is not None:
+        print "Connected via SUT to %s [at %s]" % (hwid, foundIP)
+        return DroidSUT(foundIP, **kwargs)
+
+ # try connecting via adb
+ try:
+ sut = DroidADB(deviceSerial=hwid, **kwargs)
+ except:
+ return None
+
+ print "Connected via ADB to %s" % (hwid)
+ return sut
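+
+# Illustrative usage of the classes above (package name and URL are placeholders):
+#
+#   dm = DroidADB(packageName="org.mozilla.fennec")
+#   dm.launchFennec("org.mozilla.fennec", url="about:home", failIfRunning=False)
+#   print dm.getTopActivity()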
diff --git a/testing/mozbase/mozdevice/mozdevice/sutini.py b/testing/mozbase/mozdevice/mozdevice/sutini.py
new file mode 100644
index 000000000..7dd5e54c4
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/sutini.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import ConfigParser
+import StringIO
+import os
+import sys
+import tempfile
+
+from mozdevice.droid import DroidSUT
+from mozdevice.devicemanager import DMError
+
+USAGE = '%s <host>'
+INI_PATH_JAVA = '/data/data/com.mozilla.SUTAgentAndroid/files/SUTAgent.ini'
+INI_PATH_NEGATUS = '/data/local/SUTAgent.ini'
+SCHEMA = {'Registration Server': (('IPAddr', ''),
+ ('PORT', '28001'),
+ ('HARDWARE', ''),
+ ('POOL', '')),
+ 'Network Settings': (('SSID', ''),
+ ('AUTH', ''),
+ ('ENCR', ''),
+ ('EAP', ''))}
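+
+# When written back out by RawConfigParser, the schema above corresponds to a
+# SUTAgent.ini shaped roughly like this (illustrative values):
+#
+#   [Registration Server]
+#   IPAddr = 10.0.0.2
+#   PORT = 28001
+#
+#   [Network Settings]
+#   SSID = MyAccessPoint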
+
+
+def get_cfg(d, ini_path):
+ cfg = ConfigParser.RawConfigParser()
+ try:
+ cfg.readfp(StringIO.StringIO(d.pullFile(ini_path)), 'SUTAgent.ini')
+ except DMError:
+ # assume this is due to a missing file...
+ pass
+ return cfg
+
+
+def put_cfg(d, cfg, ini_path):
+ print 'Writing modified SUTAgent.ini...'
+ t = tempfile.NamedTemporaryFile(delete=False)
+ cfg.write(t)
+ t.close()
+ try:
+ d.pushFile(t.name, ini_path)
+    except DMError as e:
+ print e
+ else:
+ print 'Done.'
+ finally:
+ os.unlink(t.name)
+
+
+def set_opt(cfg, s, o, dflt):
+ prompt = ' %s' % o
+ try:
+ curval = cfg.get(s, o)
+ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+ curval = ''
+ if curval:
+ dflt = curval
+ prompt += ': '
+ if dflt:
+ prompt += '[%s] ' % dflt
+ newval = raw_input(prompt)
+ if not newval:
+ newval = dflt
+ if newval == curval:
+ return False
+ cfg.set(s, o, newval)
+ return True
+
+
+def bool_query(prompt, dflt):
+ while True:
+ i = raw_input('%s [%s] ' % (prompt, 'y' if dflt else 'n')).lower()
+ if not i or i[0] in ('y', 'n'):
+ break
+ print 'Enter y or n.'
+ return (not i and dflt) or (i and i[0] == 'y')
+
+
+def edit_sect(cfg, sect, opts):
+ changed_vals = False
+ if bool_query('Edit section %s?' % sect, False):
+ if not cfg.has_section(sect):
+ cfg.add_section(sect)
+ print '%s settings:' % sect
+ for opt, dflt in opts:
+ changed_vals |= set_opt(cfg, sect, opt, dflt)
+ print
+ else:
+ if cfg.has_section(sect) and bool_query('Delete section %s?' % sect,
+ False):
+ cfg.remove_section(sect)
+ changed_vals = True
+ return changed_vals
+
+
+def main():
+ try:
+ host = sys.argv[1]
+ except IndexError:
+ print USAGE % sys.argv[0]
+ sys.exit(1)
+ try:
+ d = DroidSUT(host, retryLimit=1)
+    except DMError as e:
+ print e
+ sys.exit(1)
+ # check if using Negatus and change path accordingly
+ ini_path = INI_PATH_JAVA
+ if 'Negatus' in d.agentProductName:
+ ini_path = INI_PATH_NEGATUS
+ cfg = get_cfg(d, ini_path)
+ if not cfg.sections():
+ print 'Empty or missing ini file.'
+ changed_vals = False
+ for sect, opts in SCHEMA.iteritems():
+ changed_vals |= edit_sect(cfg, sect, opts)
+ if changed_vals:
+ put_cfg(d, cfg, ini_path)
+ else:
+ print 'No changes.'
+
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/mozbase/mozdevice/mozdevice/version_codes.py b/testing/mozbase/mozdevice/mozdevice/version_codes.py
new file mode 100644
index 000000000..6602d837a
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/version_codes.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+VERSION CODES of the android releases.
+
+See http://developer.android.com/reference/android/os/Build.VERSION_CODES.html.
+"""
+
+# Magic version number for a current development build, which has
+# not yet turned into an official release.
+CUR_DEVELOPMENT = 10000
+
+# October 2008: The original, first, version of Android
+BASE = 1
+# February 2009: First Android update, officially called 1.1
+BASE_1_1 = 2
+# May 2009: Android 1.5
+CUPCAKE = 3
+# September 2009: Android 1.6
+DONUT = 4
+# November 2009: Android 2.0
+ECLAIR = 5
+# December 2009: Android 2.0.1
+ECLAIR_0_1 = 6
+# January 2010: Android 2.1
+ECLAIR_MR1 = 7
+# June 2010: Android 2.2
+FROYO = 8
+# November 2010: Android 2.3
+GINGERBREAD = 9
+# February 2011: Android 2.3.3
+GINGERBREAD_MR1 = 10
+# February 2011: Android 3.0
+HONEYCOMB = 11
+# May 2011: Android 3.1
+HONEYCOMB_MR1 = 12
+# June 2011: Android 3.2
+HONEYCOMB_MR2 = 13
+# October 2011: Android 4.0
+ICE_CREAM_SANDWICH = 14
+# December 2011: Android 4.0.3
+ICE_CREAM_SANDWICH_MR1 = 15
+# June 2012: Android 4.1
+JELLY_BEAN = 16
+# November 2012: Android 4.2
+JELLY_BEAN_MR1 = 17
+# July 2013: Android 4.3
+JELLY_BEAN_MR2 = 18
+# October 2013: Android 4.4
+KITKAT = 19
+# Android 4.4W
+KITKAT_WATCH = 20
+# Lollipop
+LOLLIPOP = 21
+LOLLIPOP_MR1 = 22
+# M
+M = 23
+# N
+N = 24
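+
+# Illustrative use of these constants (mirrors DroidMixin.stopApplication in
+# droid.py): the device's "ro.build.version.sdk" property is an integer that
+# can be compared directly against them, e.g.
+#
+#   if int(sdk_level) >= HONEYCOMB:
+#       pass  # "am force-stop <package>" is available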
diff --git a/testing/mozbase/mozdevice/setup.py b/testing/mozbase/mozdevice/setup.py
new file mode 100644
index 000000000..477227f28
--- /dev/null
+++ b/testing/mozbase/mozdevice/setup.py
@@ -0,0 +1,36 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_NAME = 'mozdevice'
+PACKAGE_VERSION = '0.48'
+
+deps = ['mozfile >= 1.0',
+ 'mozlog >= 3.0',
+ 'moznetwork >= 0.24',
+ 'mozprocess >= 0.19',
+ ]
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Mozilla-authored device management",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='',
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozdevice'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ dm = mozdevice.dmcli:cli
+ sutini = mozdevice.sutini:main
+ """,
+ )
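+
+# Once this package is installed (e.g. "pip install ." from this directory),
+# the console scripts declared above should be available on PATH:
+#   dm --help         # command-line device client (dmcli.py)
+#   sutini <host>     # interactive SUTAgent.ini editor (sutini.py)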
diff --git a/testing/mozbase/mozdevice/sut_tests/README.md b/testing/mozbase/mozdevice/sut_tests/README.md
new file mode 100644
index 000000000..ffc100f45
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/README.md
@@ -0,0 +1,15 @@
+# SUT Agent tests
+
+* In order to run these tests you need to have a phone running SUT Agent
+connected.
+
+* Make sure you can reach the device's TCP ports 20700 and 20701. Running
+`adb forward tcp:20700 tcp:20700 && adb forward tcp:20701 tcp:20701` forwards
+your localhost ports 20700 and 20701 to the corresponding ports on the device.
+
+* You might need some common tools like cp. Use the `setup-tools.sh` script
+to install them. It requires `$ADB` to point to the `adb` binary on the system.
+
+* Make sure the SUTAgent on the device is running.
+
+* Run: `python runtests.py`
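+
+* To run only selected tests or to target a device at a specific address, the
+`--ip`, `--port` and `--script` options defined in `runtests.py` can be used,
+e.g. `python runtests.py --ip 192.168.1.5 --script test_foo` (the address and
+module name here are placeholders; `--script` takes a test file name without
+the `.py` extension).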
diff --git a/testing/mozbase/mozdevice/sut_tests/dmunit.py b/testing/mozbase/mozdevice/sut_tests/dmunit.py
new file mode 100644
index 000000000..b4a3d0b9b
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/dmunit.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import types
+import unittest
+
+from mozdevice import devicemanager
+from mozdevice import devicemanagerSUT
+
+ip = ''
+port = 0
+heartbeat_port = 0
+log_level = logging.ERROR
+
+
+class DeviceManagerTestCase(unittest.TestCase):
+ """DeviceManager tests should subclass this.
+ """
+
+ """Set to False in your derived class if this test
+ should not be run on the Python agent.
+ """
+ runs_on_test_device = True
+
+ def _setUp(self):
+ """ Override this if you want set-up code in your test."""
+ return
+
+ def setUp(self):
+ self.dm = devicemanagerSUT.DeviceManagerSUT(host=ip, port=port,
+ logLevel=log_level)
+ self.dmerror = devicemanager.DMError
+ self._setUp()
+
+
+class DeviceManagerTestLoader(unittest.TestLoader):
+
+ def __init__(self, isTestDevice=False):
+ self.isTestDevice = isTestDevice
+
+ def loadTestsFromModuleName(self, module_name):
+ """Loads tests from modules unless the SUT is a test device and
+ the test case has runs_on_test_device set to False
+ """
+ tests = []
+ module = __import__(module_name)
+ for name in dir(module):
+ obj = getattr(module, name)
+ if (isinstance(obj, (type, types.ClassType)) and
+ issubclass(obj, unittest.TestCase)) and \
+ (not self.isTestDevice or obj.runs_on_test_device):
+ tests.append(self.loadTestsFromTestCase(obj))
+ return self.suiteClass(tests)
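+
+# A minimal test module picked up by runtests.py would look something like the
+# following (hypothetical example; the file would be named test_<something>.py
+# and live next to runtests.py):
+#
+#   class MkDirTestCase(DeviceManagerTestCase):
+#       def runTest(self):
+#           path = self.dm.deviceRoot + '/unit-test-dir'
+#           self.dm.mkDir(path)
+#           self.assertTrue(self.dm.dirExists(path))
+#           self.dm.removeDir(path)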
diff --git a/testing/mozbase/mozdevice/sut_tests/genfiles.py b/testing/mozbase/mozdevice/sut_tests/genfiles.py
new file mode 100644
index 000000000..5ab8d6349
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/genfiles.py
@@ -0,0 +1,85 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from random import randint
+from zipfile import ZipFile
+import os
+import shutil
+
+
+def gen_binary_file(path, size):
+ with open(path, 'wb') as f:
+ for i in xrange(size):
+ byte = '%c' % randint(0, 255)
+ f.write(byte)
+
+
+def gen_zip(path, files, stripped_prefix=''):
+ with ZipFile(path, 'w') as z:
+ for f in files:
+ new_name = f.replace(stripped_prefix, '')
+ z.write(f, new_name)
+
+
+def mkdir(path, *args):
+ try:
+ os.mkdir(path, *args)
+ except OSError:
+ pass
+
+
+def gen_folder_structure():
+ root = 'test-files'
+ prefix = os.path.join(root, 'push2')
+ mkdir(prefix)
+
+ gen_binary_file(os.path.join(prefix, 'file4.bin'), 59036)
+ mkdir(os.path.join(prefix, 'sub1'))
+ shutil.copyfile(os.path.join(root, 'mytext.txt'),
+ os.path.join(prefix, 'sub1', 'file1.txt'))
+ mkdir(os.path.join(prefix, 'sub1', 'sub1.1'))
+ shutil.copyfile(os.path.join(root, 'mytext.txt'),
+ os.path.join(prefix, 'sub1', 'sub1.1', 'file2.txt'))
+ mkdir(os.path.join(prefix, 'sub2'))
+ shutil.copyfile(os.path.join(root, 'mytext.txt'),
+ os.path.join(prefix, 'sub2', 'file3.txt'))
+
+
+def gen_test_files():
+ gen_folder_structure()
+ flist = [
+ os.path.join('test-files', 'push2'),
+ os.path.join('test-files', 'push2', 'file4.bin'),
+ os.path.join('test-files', 'push2', 'sub1'),
+ os.path.join('test-files', 'push2', 'sub1', 'file1.txt'),
+ os.path.join('test-files', 'push2', 'sub1', 'sub1.1'),
+ os.path.join('test-files', 'push2', 'sub1', 'sub1.1', 'file2.txt'),
+ os.path.join('test-files', 'push2', 'sub2'),
+ os.path.join('test-files', 'push2', 'sub2', 'file3.txt')
+ ]
+ gen_zip(os.path.join('test-files', 'mybinary.zip'),
+ flist, stripped_prefix=('test-files' + os.path.sep))
+ gen_zip(os.path.join('test-files', 'mytext.zip'),
+ [os.path.join('test-files', 'mytext.txt')])
+
+
+def clean_test_files():
+ ds = [os.path.join('test-files', d) for d in ('push1', 'push2')]
+ for d in ds:
+ try:
+ shutil.rmtree(d)
+ except OSError:
+ pass
+
+ fs = [os.path.join('test-files', f) for f in ('mybinary.zip', 'mytext.zip')]
+ for f in fs:
+ try:
+ os.remove(f)
+ except OSError:
+ pass
+
+
+if __name__ == '__main__':
+ gen_test_files()
diff --git a/testing/mozbase/mozdevice/sut_tests/runtests.py b/testing/mozbase/mozdevice/sut_tests/runtests.py
new file mode 100644
index 000000000..fffc306e3
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/runtests.py
@@ -0,0 +1,96 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from optparse import OptionParser
+import logging
+import os
+import re
+import sys
+import unittest
+
+import dmunit
+import genfiles
+
+
+def main(ip, port, heartbeat_port, scripts, directory, isTestDevice, verbose):
+ dmunit.ip = ip
+ dmunit.port = port
+ dmunit.heartbeat_port = heartbeat_port
+ if verbose:
+ dmunit.log_level = logging.DEBUG
+
+ suite = unittest.TestSuite()
+
+ genfiles.gen_test_files()
+
+ if scripts:
+ # Ensure the user didn't include the .py on the name of the test file
+ # (and get rid of it if they did)
+ scripts = map(lambda x: x.split('.')[0], scripts)
+ else:
+ # Go through the directory and pick up everything
+ # named test_*.py and run it
+ testfile = re.compile('^test_.*\.py$')
+ files = os.listdir(directory)
+
+ for f in files:
+ if testfile.match(f):
+ scripts.append(f.split('.')[0])
+
+ testLoader = dmunit.DeviceManagerTestLoader(isTestDevice)
+ for s in scripts:
+ suite.addTest(testLoader.loadTestsFromModuleName(s))
+ unittest.TextTestRunner(verbosity=2).run(suite)
+
+ genfiles.clean_test_files()
+
+
+if __name__ == "__main__":
+
+ default_ip = '127.0.0.1'
+ default_port = 20701
+
+ env_ip, _, env_port = os.getenv('TEST_DEVICE', '').partition(':')
+ if env_port:
+ try:
+ env_port = int(env_port)
+ except ValueError:
+ print >> sys.stderr, "Port in TEST_DEVICE should be an integer."
+ sys.exit(1)
+
+ # Deal with the options
+ parser = OptionParser()
+ parser.add_option("--ip", action="store", type="string", dest="ip",
+ help="IP address for device running SUTAgent, defaults "
+ "to what's provided in $TEST_DEVICE or 127.0.0.1",
+ default=(env_ip or default_ip))
+
+ parser.add_option("--port", action="store", type="int", dest="port",
+ help="Port of SUTAgent on device, defaults to "
+ "what's provided in $TEST_DEVICE or 20701",
+ default=(env_port or default_port))
+
+ parser.add_option("--heartbeat", action="store", type="int",
+ dest="heartbeat_port", help="Port for heartbeat/data "
+ "channel, defaults to 20700", default=20700)
+
+ parser.add_option("--script", action="append", type="string",
+ dest="scripts", help="Name of test script to run, "
+ "can be specified multiple times", default=[])
+
+ parser.add_option("--directory", action="store", type="string", dest="dir",
+ help="Directory to look for tests in, defaults to "
+ "current directory", default=os.getcwd())
+
+ parser.add_option("--testDevice", action="store_true", dest="isTestDevice",
+ help="Specifies that the device is a local test agent",
+ default=False)
+
+ parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
+ help="Verbose DeviceManager output", default=False)
+
+ (options, args) = parser.parse_args()
+
+ main(options.ip, options.port, options.heartbeat_port, options.scripts,
+ options.dir, options.isTestDevice, options.verbose)
diff --git a/testing/mozbase/mozdevice/sut_tests/setup-tools.sh b/testing/mozbase/mozdevice/sut_tests/setup-tools.sh
new file mode 100755
index 000000000..e50a71d0b
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/setup-tools.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+if [ ! -f busybox-armv6l ]
+then
+ wget http://busybox.net/downloads/binaries/1.19.0/busybox-armv6l
+fi
+$ADB remount
+$ADB push busybox-armv6l /system/bin/busybox
+
+$ADB shell 'cd /system/bin; chmod 555 busybox; for x in `./busybox --list`; do ln -s ./busybox $x; done'
diff --git a/testing/mozbase/mozdevice/sut_tests/test-files/mytext.txt b/testing/mozbase/mozdevice/sut_tests/test-files/mytext.txt
new file mode 100644
index 000000000..74cb65fa8
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test-files/mytext.txt
@@ -0,0 +1,177 @@
+this is a file with 71K bytes of text in it
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc placerat, mi sit amet laoreet sollicitudin, neque urna bibendum eros, nec adipiscing tellus ipsum id risus. Sed aliquam ligula nec nibh sollicitudin venenatis. Praesent faucibus tortor vel felis egestas pellentesque. Cras viverra, dui viverra vulputate ornare, eros nunc volutpat nisl, sed sodales turpis orci quis diam. Donec eu sem mi. Mauris dictum blandit mauris quis ultricies. Sed faucibus erat vel velit viverra adipiscing. Donec placerat mattis venenatis. Suspendisse placerat sagittis risus et dapibus. Vivamus diam nisi, elementum ac mollis nec, porta ut sapien. Curabitur ac dolor ligula, vel sollicitudin sapien. Nullam blandit ligula nisl. Proin faucibus, ipsum sit amet molestie tincidunt, tellus neque accumsan lectus, a congue felis odio eu nunc. Pellentesque mauris sapien, varius ut scelerisque et, dictum sed magna. In faucibus tristique erat, a malesuada justo tincidunt sed.
+
+Morbi quis iaculis elit. Praesent nec diam mi, eu auctor neque. Phasellus fringilla turpis a metus imperdiet laoreet et ut augue. Mauris imperdiet scelerisque arcu quis sollicitudin. Nulla mauris dui, ultricies at vulputate quis, pharetra in erat. Donec mollis ipsum quis purus fermentum commodo. Nunc nec orci sem, quis rhoncus mauris. Sed iaculis tempus quam, non consectetur nisl tincidunt vitae. Nulla aliquam sodales auctor. Donec placerat venenatis facilisis. In sollicitudin arcu tincidunt lorem molestie bibendum. Phasellus rutrum ante vitae lorem iaculis eget porta odio pretium.
+
+Duis id mauris ante, eget ullamcorper justo. Integer vitae felis nisi, eget blandit tortor. Vivamus ligula odio, adipiscing sit amet tincidunt id, pretium sed massa. Suspendisse massa felis, viverra non adipiscing quis, dictum eget metus. In porta, tortor a imperdiet sodales, nulla mi mollis ipsum, quis venenatis nunc ipsum sit amet libero. Aenean sed leo eros. Curabitur varius egestas tempor. Nullam vitae convallis nunc. Phasellus molestie volutpat purus ut commodo. Phasellus eget lacus sem. Maecenas ligula magna, lacinia mollis molestie vitae, fringilla ac turpis. Sed ut nunc id nunc fringilla consectetur at et neque.
+
+Aliquam erat volutpat. Nullam lacinia, neque id luctus consectetur, nisl justo porta justo, eu scelerisque ligula ligula sed purus. Cras faucibus porttitor nisi at vulputate. Integer iaculis urna ut sapien iaculis ac malesuada quam congue. Mauris volutpat tristique est, vitae vehicula nisi imperdiet tincidunt. Curabitur semper, tellus sed cursus placerat, mi nulla dapibus odio, quis adipiscing arcu eros eu quam. Nullam fermentum dictum tellus non pretium. Sed dignissim enim a odio varius pellentesque. Nullam at lacinia mi. Nam et sem non risus suscipit pharetra vel et nisl. Cras porta lorem quis diam tempus nec dapibus velit sodales. Suspendisse laoreet hendrerit fringilla.
+
+Phasellus velit quam, malesuada eget rhoncus in, hendrerit sed nibh. Quisque nisl erat, pulvinar vitae condimentum sed, vehicula sit amet elit. Nulla eget mauris est, vel lacinia eros. Maecenas feugiat tortor ac nulla porta bibendum. Phasellus commodo ultrices rhoncus. Ut nec lacus in mauris semper congue. Vivamus rhoncus dolor a nulla accumsan semper. Donec vestibulum dictum blandit. Donec lobortis, purus a cursus faucibus, enim nisl fermentum odio, sed sagittis odio quam quis elit. Sed eget varius augue. Quisque a erat dolor, sit amet porttitor eros. Curabitur libero orci, dignissim vel egestas ut, laoreet sit amet augue. Curabitur porta consectetur felis. Etiam sit amet enim dolor, quis lacinia libero. Nunc vel vulputate turpis. Nulla elit nunc, dignissim sed hendrerit vitae, laoreet et urna. Donec massa est, porta eget lobortis sed, dictum vel arcu. Curabitur nec sem neque.
+
+Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. In ipsum risus, blandit ac porta non, imperdiet ac erat. Sed libero nisi, gravida quis dignissim vel, mattis quis sem. Ut pretium vulputate augue, a varius mi vehicula at. Ut cursus interdum lobortis. Duis ac sagittis lacus. Suspendisse pulvinar feugiat mi id vestibulum. Integer aliquet augue vitae augue tincidunt pharetra. Duis interdum nunc pellentesque nisl malesuada volutpat. Nam molestie pulvinar felis, quis volutpat urna commodo in. Donec sed adipiscing risus. Mauris nec orci ac eros lacinia euismod sed sed dui. Mauris vel est eget mi bibendum venenatis nec id enim. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas ac orci varius mi aliquam sodales. Cras dapibus lorem et erat tincidunt non consectetur risus commodo. Aenean tincidunt varius orci eu placerat. Sed in euismod justo.
+
+Pellentesque auctor porta magna, vitae volutpat est pharetra id. Phasellus at mi nibh, vitae eleifend mi. Sed egestas orci lacus. Mauris suscipit nunc non diam mattis rutrum. Etiam pretium, mi et ultricies molestie, ante nibh posuere dolor, a fermentum diam massa eget purus. Aliquam erat volutpat. Nam accumsan dapibus quam, vitae dictum est bibendum ut. Sed at vehicula mi. Phasellus vitae ipsum a quam cursus euismod sit amet et turpis. Nam ultricies molestie massa, a consectetur ipsum aliquet sit amet. Pellentesque non orci mauris. Suspendisse congue venenatis est convallis laoreet. Aenean nulla est, bibendum id adipiscing quis, fermentum quis nisi. Nam lectus ante, sodales sodales ultrices a, vehicula ac ligula. Phasellus feugiat tempor lectus, id interdum turpis mollis eu. Suspendisse potenti. Sed euismod tempus ipsum, et iaculis felis consequat sed. Mauris bibendum, eros a semper pharetra, nunc urna commodo lacus, quis placerat dui urna semper libero. Mauris turpis metus, mattis id dignissim eget, sollicitudin nec lacus.
+
+Donec massa dui, laoreet dignissim interdum sit amet, semper vel ligula. Maecenas ut eros est, quis hendrerit purus. In sit amet mattis quam. Curabitur sit amet turpis ac ipsum gravida pulvinar sit amet ut libero. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Vestibulum bibendum massa eu nisi fermentum varius. Mauris sollicitudin ultrices nunc, eget facilisis est imperdiet sit amet. Nam elementum magna eget nisi commodo tincidunt. Aliquam erat volutpat. Curabitur in mauris nunc, at eleifend lectus. Integer tincidunt vestibulum lectus, ut porttitor magna dapibus a. Vivamus erat massa, pretium sed tincidunt ac, tincidunt hendrerit ligula. Praesent purus eros, euismod at commodo eu, bibendum eu turpis.
+
+Sed tempor ultrices tortor, et imperdiet est porttitor a. Vestibulum sodales mauris sed urna pellentesque eleifend. Ut euismod tristique nulla eu fermentum. Ut eu dui non purus varius mollis in vel enim. Maecenas ut congue nulla. Suspendisse ultrices sollicitudin molestie. Aliquam vel pulvinar metus. Nulla varius adipiscing metus, ac commodo ante dapibus ac. Phasellus sit amet ligula sed elit scelerisque molestie sit amet ac quam. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Pellentesque sollicitudin libero a quam rutrum egestas ac quis arcu. Etiam mattis massa vel erat mattis ut elementum diam cursus. Fusce bibendum lorem in erat auctor posuere. Ut non mi sed neque sodales vulputate. Donec lacinia, lacus nec hendrerit luctus, dolor nisi dignissim turpis, at rhoncus dui nisi nec elit. Integer laoreet, justo ut pellentesque iaculis, diam turpis scelerisque quam, sit amet semper purus lacus at erat. Sed sollicitudin consectetur eros at ultricies.
+
+Nam in dolor massa. Vivamus semper, quam sed bibendum pellentesque, lectus purus auctor dui, eget mollis tellus urna luctus nisi. Duis felis tellus, dapibus sed sollicitudin commodo, ornare id metus. Aliquam rhoncus pulvinar elit sit amet fermentum. Curabitur ut ligula augue, nec rhoncus orci. Proin ipsum elit, tristique semper rhoncus sit amet, ultrices vel orci. Integer mattis hendrerit blandit. Curabitur tempor quam eget nunc rutrum nec porta elit elementum. Morbi at accumsan libero. Etiam vestibulum facilisis augue vitae feugiat. Vivamus in quam arcu, vel ornare purus. Pellentesque non augue sit amet metus imperdiet accumsan. Suspendisse condimentum vulputate congue. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Donec consequat enim ac est iaculis dictum. Vivamus rhoncus, urna sit amet tempor ornare, nulla sem eleifend mi, eu pretium justo sapien a nulla. Nulla facilisi. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Aenean sed mattis turpis.
+
+Nulla in magna scelerisque sem imperdiet tempus. Aenean adipiscing pretium sem, eget vulputate turpis pretium vitae. Etiam id enim a mauris faucibus facilisis faucibus vel enim. Phasellus blandit mi nec nibh rhoncus nec sollicitudin mi semper. Maecenas euismod dui sit amet dolor dictum dignissim. Mauris ac quam urna, quis posuere lacus. Sed velit elit, dapibus hendrerit sagittis at, pulvinar ac velit. Quisque in nulla vel massa posuere feugiat sed quis enim. Donec erat eros, adipiscing at fringilla sed, ornare id nisl. Duis eleifend consectetur tincidunt. Donec enim augue, mollis sed commodo mattis, luctus ac libero. Vestibulum erat ante, lacinia ac porttitor quis, vulputate et ligula. Nunc nisl orci, eleifend et laoreet eu, egestas et est. Nulla nulla purus, euismod nec porttitor quis, volutpat id diam. Nunc ut nisl eget orci venenatis mattis. In eget nisi nibh. Integer erat mauris, interdum nec mattis in, pulvinar vitae orci. Duis dictum tortor in elit aliquet commodo. Vestibulum venenatis auctor faucibus. Nulla adipiscing nisi eu lectus ornare ultrices.
+
+Curabitur placerat ante a odio dapibus placerat. Praesent ante quam, rutrum quis dignissim vulputate, dignissim vitae elit. Curabitur et nibh ante. Sed luctus bibendum pulvinar. Ut vel justo eros. Maecenas faucibus ornare consequat. Mauris non interdum elit. Mauris tortor magna, tempor quis rutrum ac, congue ut sem. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Sed semper interdum quam eu semper. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Mauris enim velit, mattis at dictum eget, ornare vel erat. Quisque non tincidunt lectus. Vestibulum auctor scelerisque erat eget adipiscing. Mauris ac metus purus, sit amet dignissim felis.
+
+Curabitur vitae quam sagittis massa aliquet facilisis id tempor justo. Aenean vulputate libero nec odio porta in rhoncus massa interdum. Maecenas consectetur suscipit consectetur. Proin a mauris sit amet ante sollicitudin auctor id ac libero. Vivamus hendrerit porta augue, ac pretium nibh cursus at. Aliquam varius nulla porta quam pellentesque scelerisque eget a felis. Maecenas elit quam, tempor vel dignissim nec, aliquam ac justo. Curabitur scelerisque cursus orci, sit amet scelerisque dolor consectetur vel. Integer tellus tortor, laoreet laoreet consequat id, vehicula nec neque. Sed sit amet ante sed magna faucibus luctus et vel nisi.
+
+Curabitur placerat viverra urna et auctor. Proin ac lacus urna, vitae sagittis erat. In ut tellus ipsum, rutrum auctor orci. Sed dolor nibh, laoreet egestas egestas non, eleifend eu lectus. Aenean lorem leo, rhoncus sit amet fermentum in, porta vel leo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Praesent lorem orci, congue nec consectetur eu, ullamcorper non nulla. Duis sed augue libero. Suspendisse potenti. Nunc id neque massa.
+
+Etiam odio magna, congue ut tristique non, dignissim nec est. Sed id purus velit. Vivamus dui dui, rutrum sit amet imperdiet non, pharetra cursus ante. Curabitur aliquet dapibus massa, non molestie orci aliquet tincidunt. Aenean in varius risus. Nullam faucibus sapien odio. Integer id est erat. Nam iaculis purus a ipsum sagittis in vestibulum lectus pulvinar. Nulla ultricies nisi a nibh gravida eget vestibulum tellus auctor. Suspendisse ut dolor elementum mi iaculis dignissim eu eleifend tellus. Sed pretium mi ligula. Integer vitae sem sit amet nunc dignissim rutrum nec eleifend felis. Aenean blandit fermentum lectus quis dignissim. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Duis congue sem a est accumsan sit amet facilisis erat dapibus. Mauris id lectus ipsum. Sed velit metus, ultrices rhoncus porta non, consectetur id ligula.
+
+Fusce eu odio volutpat sem pellentesque laoreet. Integer a justo ante, sed elementum elit. Donec sed mattis arcu. Vivamus imperdiet sodales ante, eget tincidunt turpis imperdiet et. Donec mi ante, tincidunt nec adipiscing sit amet, sodales vel arcu. Cras eu libero arcu. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Aliquam ac dui justo. Nam nisi ipsum, dignissim id fermentum at, accumsan ut quam. Quisque non est quis nibh iaculis gravida nec id velit. Cras elementum tincidunt mattis. Mauris odio erat, sodales ut egestas nec, semper eget enim. Mauris quis tincidunt quam.
+
+Nullam vestibulum ligula imperdiet nunc tincidunt feugiat imperdiet neque sodales. Praesent lacinia sollicitudin pulvinar. Donec ipsum augue, interdum et commodo vitae, lobortis nec ipsum. Nulla ac diam sed ipsum venenatis malesuada at eu odio. Vivamus in urna sed sapien mollis convallis eget eu massa. Proin viverra dolor vitae sem porta vulputate. Donec velit leo, ullamcorper dictum rhoncus in, euismod et ante. Morbi at leo venenatis felis faucibus elementum a a elit. Integer aliquet tempor neque ac bibendum. In fermentum commodo faucibus. In hac habitasse platea dictumst. Nam pulvinar gravida metus in rhoncus. Praesent lobortis ornare libero quis faucibus. Donec a erat ligula. Praesent quis sapien sit amet urna adipiscing sagittis.
+
+Praesent eget libero sed massa ornare congue eget eu lorem. Nunc porta magna ut massa dignissim ultricies. Duis eu arcu quis purus consequat egestas vitae a ipsum. In nunc sapien, venenatis et commodo sollicitudin, facilisis rhoncus risus. Nullam aliquam, orci eu vestibulum sagittis, nulla risus dictum dui, non luctus diam arcu in massa. Maecenas risus lacus, adipiscing sed laoreet sed, ornare sit amet quam. Nam convallis euismod sagittis. Fusce justo mauris, laoreet lobortis gravida semper, tincidunt pellentesque nisl. Sed sit amet turpis in nisi molestie sagittis eget sit amet nulla. Donec eget semper mauris. Aenean nec odio a nibh faucibus dapibus. Donec imperdiet tortor non elit congue varius. Morbi libero enim, tincidunt at bibendum vitae, dapibus ac ante. Proin eu metus quis turpis bibendum molestie. Nulla malesuada magna quis ante mollis ultrices. Suspendisse vel nibh at risus porttitor mattis. Nulla laoreet consequat viverra. Ut scelerisque faucibus mauris sed vestibulum. In pulvinar massa in magna dapibus ullamcorper. Quisque in ante sapien, nec ullamcorper tortor.
+
+Etiam in ipsum urna, eu feugiat nibh. In sed eros ligula, eget interdum lorem. Cras ut malesuada purus. Suspendisse vel odio quam. Vivamus eu rutrum quam. Integer nec luctus est. Mauris aliquam est ac neque convallis placerat. Sed massa ante, sagittis a tincidunt semper, interdum eget mauris. Sed a ligula sed justo facilisis sagittis vel eu ipsum. Quisque aliquam vestibulum nisl quis commodo.
+
+Morbi id rutrum mi. Curabitur a est quis mauris accumsan egestas a vulputate urna. Nunc eleifend lacus non lacus tincidunt vitae commodo odio mattis. Cras accumsan blandit odio, vitae mattis est egestas eget. Integer condimentum sem in lectus euismod consectetur. Donec est lectus, posuere sit amet ornare non, ullamcorper vel dolor. Vestibulum luctus consectetur scelerisque. Duis suscipit congue mi id venenatis. Quisque eu mauris venenatis dolor condimentum gravida a a leo. Aenean et massa est. Sed arcu ligula, sagittis in luctus in, condimentum a nisl. In placerat interdum felis, eu luctus dolor rutrum sed. Nam commodo, urna a adipiscing scelerisque, turpis arcu adipiscing metus, at blandit nulla elit quis sapien. Quisque sodales tincidunt odio, quis sodales erat bibendum condimentum. Ut semper dolor in ipsum tincidunt convallis. Phasellus molestie nulla id ipsum semper ultrices. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Mauris aliquam semper neque at sagittis. Curabitur luctus tristique facilisis. Donec scelerisque ante non tortor fringilla eleifend non in felis.
+
+Maecenas nec ipsum eget odio ornare egestas non non tortor. Vestibulum elementum ultrices ipsum, nec elementum augue dapibus vitae. Fusce hendrerit erat eget libero porttitor sit amet venenatis neque mollis. Donec lorem quam, egestas sed rutrum pharetra, ultrices quis quam. Phasellus iaculis risus eget leo suscipit eu consectetur libero bibendum. Nulla euismod, est sit amet tristique tincidunt, nisi turpis sagittis justo, ornare elementum nibh turpis at ipsum. Mauris id velit risus, in lacinia libero. Integer at urna eu sapien luctus sollicitudin. Vestibulum vitae varius est. Curabitur eget quam urna, cursus egestas orci.
+
+Sed eu felis nisi. Nullam nisi lacus, imperdiet sed accumsan sed, pretium ac dolor. Curabitur feugiat tristique velit, id fermentum velit blandit lobortis. Phasellus ac arcu vel lacus ultricies aliquet. Morbi aliquet pulvinar convallis. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin tincidunt commodo tortor, vitae semper velit consequat ac. Suspendisse ac sollicitudin elit. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Quisque imperdiet, mi sed ultrices ullamcorper, eros justo malesuada urna, ac dapibus turpis leo sed sem. Nulla commodo consectetur libero a scelerisque. Maecenas in tortor sem, vitae rhoncus magna. Nulla nec nisl nisl, eget iaculis felis. Phasellus placerat consectetur erat, non porta tellus egestas nec. Praesent gravida pharetra arcu. Nullam bibendum congue eleifend.
+
+Nam risus dolor, mollis in suscipit vel, egestas eget augue. Donec et nulla mi. Vestibulum nunc mauris, volutpat eget lacinia ut, consequat non justo. Etiam bibendum elit quis ipsum volutpat sit amet convallis erat feugiat. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed non turpis elit, in laoreet sapien. Quisque ac elit id odio luctus pharetra. Phasellus sit amet est nec orci vestibulum varius. Cras ut justo a velit accumsan scelerisque. Proin lacus odio, convallis in semper egestas, ullamcorper sit amet erat. Proin ornare mollis pharetra. Phasellus convallis, sapien a placerat scelerisque, magna ante lobortis massa, ut semper nibh turpis a nibh.
+
+Vestibulum risus mauris, auctor eu aliquam quis, pretium vel massa. Nunc imperdiet magna quis nisi facilisis euismod. Nunc aliquam, felis quis mollis aliquam, mi arcu commodo eros, sit amet convallis nunc magna non magna. Suspendisse accumsan tortor non metus convallis pharetra. In vitae mi sed leo ornare viverra. Donec a massa at sem euismod scelerisque id a sapien. Nam nec purus purus, quis lacinia sem. Sed laoreet erat quis tortor feugiat at mattis lacus sollicitudin. In hac habitasse platea dictumst. Vivamus tristique rhoncus eros a hendrerit. Etiam semper dapibus tortor, quis porta purus ullamcorper eget. In iaculis elit ut neque varius at consequat tellus accumsan.
+
+Praesent ut ipsum nec nulla consequat laoreet. Quisque viverra rutrum bibendum. Vivamus vitae bibendum augue. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nulla hendrerit condimentum lacinia. Donec sed bibendum lectus. Ut venenatis tincidunt neque et fermentum. Mauris fermentum, est at molestie luctus, nunc lorem sodales dolor, ut facilisis massa risus ut sem. Vestibulum nec nisi sed lacus imperdiet ornare. Duis sed lobortis nisi. In urna ipsum, posuere fringilla adipiscing eu, euismod a purus. Proin bibendum feugiat adipiscing. Morbi neque turpis, ullamcorper at feugiat ac, condimentum ut ante. Proin eget orci mauris, nec congue dolor.
+
+Sed quis dolor massa, sed fermentum eros. Fusce et scelerisque tortor. Donec bibendum vestibulum neque, id tristique leo eleifend non. Ut vel lacinia orci. Etiam lacus erat, varius viverra accumsan sit amet, imperdiet at sapien. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Vestibulum erat lacus, hendrerit consectetur vulputate id, mattis eu nunc. Morbi lacinia bibendum eros, sit amet luctus nisl lobortis lobortis. Nullam sit amet nisl vel justo ornare bibendum eu quis nunc. Morbi faucibus dictum quam, sed suscipit est auctor ac. Sed egestas ultricies sem a pharetra. Phasellus sagittis ornare lorem eu aliquam. Praesent vitae lectus ut dui consectetur varius. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nam ac orci id metus tincidunt congue vel ut mi. Nunc auctor tristique enim quis sodales.
+
+Aenean quis tempor libero. In sed quam purus. Nam in velit erat. Ut ullamcorper nunc ut nibh facilisis non imperdiet enim interdum. Praesent et mi nulla, quis facilisis lacus. Nulla luctus, velit vulputate egestas aliquet, arcu dolor vulputate tellus, eu auctor ipsum tortor ut lorem. In sed nulla auctor elit adipiscing laoreet. Mauris id pretium velit. Vestibulum aliquet bibendum laoreet. Duis convallis, leo vitae tincidunt fringilla, massa eros porta lorem, a convallis sem massa sit amet libero. Nam ligula leo, porta non hendrerit a, luctus pellentesque tortor. Nulla fermentum mi lacinia est pellentesque sed rhoncus nisl tristique. Curabitur venenatis neque id magna egestas eget dictum nisi volutpat.
+
+Ut sagittis fringilla arcu, ut condimentum metus tempor et. Duis elit neque, varius quis consectetur et, vulputate egestas odio. Curabitur molestie congue nibh, pulvinar tincidunt elit tempus ut. Quisque nec magna lacus. Quisque eu justo lacus. Maecenas tempus porttitor consequat. Ut vulputate lacinia tempus. Praesent dignissim iaculis orci ac euismod. Proin porttitor lorem auctor erat placerat quis tincidunt tellus posuere. Nam ultrices sapien ultrices urna aliquet convallis. Aenean auctor fringilla vestibulum.
+
+Proin eros nisl, viverra placerat eleifend a, facilisis et augue. Duis commodo tincidunt molestie. Nullam malesuada ligula eget libero tincidunt viverra. Ut euismod sem in turpis posuere rhoncus. Donec luctus, eros quis ultricies eleifend, lacus ligula porttitor magna, sit amet lobortis enim turpis non orci. Nunc odio nisi, luctus id euismod non, hendrerit quis dolor. Proin tristique sem semper massa porttitor fringilla. Curabitur a felis tellus. Donec tempus, libero at ornare commodo, risus sapien venenatis mi, sit amet fringilla diam enim at arcu. Suspendisse potenti. Phasellus auctor, lorem sed pulvinar ornare, eros nunc tincidunt dui, semper interdum lorem purus nec turpis. Sed egestas, orci non varius dapibus, nulla felis rutrum tortor, a vehicula nisi magna et magna. Donec aliquam rhoncus arcu ac volutpat.
+
+Quisque leo risus, egestas eu posuere eget, malesuada quis erat. Donec vel nisi quis erat vestibulum consectetur. Donec mi mi, dictum vel posuere ac, pharetra non justo. Vivamus rhoncus mollis odio, eu fermentum turpis blandit a. Pellentesque ornare consequat odio, non sodales massa sollicitudin ac. Vestibulum euismod nisi non augue commodo vitae laoreet justo tempor. Vestibulum at arcu ac elit tincidunt vehicula pretium eget magna. Nullam non eros eros. Morbi sed diam ut leo viverra gravida a sit amet sem. Duis ultricies tellus in nisi vulputate rhoncus. Praesent molestie eros et ligula sodales ut euismod arcu egestas. Cras ullamcorper dapibus erat id luctus. Maecenas pretium rutrum mauris, ac rhoncus lacus commodo eu. Duis ut diam quis neque accumsan laoreet in eu tellus. Curabitur sit amet ligula nibh. Vestibulum vitae semper leo. Sed volutpat turpis dictum justo luctus quis gravida tortor volutpat. Proin velit dolor, tempor quis iaculis eu, congue vitae nisl. Vestibulum porttitor, risus id consequat suscipit, ipsum leo luctus tellus, sed sollicitudin nulla orci eget arcu.
+
+Fusce et urna sed erat porttitor condimentum convallis et ipsum. Integer sagittis arcu sit amet dolor interdum eu tincidunt sapien sodales. Sed ut elementum ipsum. Aliquam erat volutpat. Fusce vel enim velit. Duis sit amet gravida quam. Sed iaculis aliquet erat sed semper. Sed in ipsum nisi. Suspendisse blandit urna ac lectus congue hendrerit. Donec sapien enim, auctor quis suscipit id, interdum a nunc. Etiam erat velit, hendrerit eget tincidunt ut, pellentesque in lectus. Integer vitae lacus eget est tempor dapibus. Duis in velit augue. In accumsan ipsum eu nibh commodo id consequat lectus condimentum. Integer volutpat condimentum posuere.
+
+Proin at turpis sapien, vel bibendum odio. Etiam scelerisque, nulla vel dapibus dapibus, neque nunc fringilla libero, nec malesuada elit erat eget turpis. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec eu mi nisi. Mauris quis dolor libero. Etiam non libero mauris. Nam posuere tortor vel dolor aliquam eu porttitor nisi convallis. Sed eu ante nec diam hendrerit aliquet. Suspendisse fermentum, augue ut lobortis viverra, turpis mi tristique felis, a facilisis est nisi vitae nisl. Nunc sit amet semper tortor. Duis et enim in nulla aliquet fermentum. Etiam ultrices facilisis justo, quis molestie enim convallis ut. Donec congue, eros quis rhoncus interdum, nisl orci porta nisl, posuere tincidunt est tellus nec magna. Suspendisse interdum, lorem nec dictum dignissim, justo dui imperdiet felis, laoreet ultricies lacus elit eu libero. Sed quis urna nec nisi condimentum tristique pulvinar id orci. Vivamus a leo nec libero hendrerit imperdiet. Sed gravida interdum urna, ac dictum odio dictum id. Vestibulum vel varius dolor.
+
+Nulla consequat condimentum eros nec mollis. Donec eget ornare eros. Etiam consequat accumsan aliquet. Quisque non leo nibh. Mauris convallis congue hendrerit. Aliquam nec augue at risus ornare viverra at id felis. Nullam ac turpis ut nisl semper rhoncus quis sit amet justo. Aliquam laoreet arcu vitae odio consequat condimentum. Aliquam erat volutpat. Sed consectetur ipsum nec justo tempor ullamcorper. Donec ac sapien lectus. Suspendisse ut velit eget massa dapibus tincidunt vel eget enim. Etiam quis quam vel lectus tincidunt viverra eget eget risus.
+
+Nulla pulvinar, odio eu hendrerit egestas, nisl nunc gravida mi, non adipiscing tortor mauris a lectus. Sed sapien mi, porttitor vel consectetur ut, viverra ut ipsum. Duis id velit vel ipsum vestibulum sodales. Nunc lorem mi, mollis nec malesuada nec, ornare faucibus nunc. Vestibulum gravida pulvinar eros quis blandit. Nulla facilisi. Curabitur consectetur condimentum justo sed faucibus. Vestibulum neque urna, tincidunt in adipiscing a, interdum a orci. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec libero neque, fringilla quis bibendum vel, tincidunt eget metus. Integer tristique, lectus quis rhoncus iaculis, enim dui adipiscing massa, sit amet blandit risus orci eu magna. Fusce ultricies tellus quis massa tempus at laoreet turpis dapibus. Donec sit amet massa viverra purus tincidunt scelerisque. Nunc ut leo nec tellus imperdiet vulputate tincidunt sed nisi. Suspendisse potenti. Sed a nisi nunc. Ut tortor quam, vestibulum et ultrices id, mattis non lacus.
+
+Nullam tincidunt quam quis erat rutrum eget tempor diam vestibulum. Morbi dapibus, quam sed placerat blandit, mi enim dictum nulla, sit amet sollicitudin lectus ante eu sem. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed in elit id nisi aliquet mollis. Cras non lorem risus. Ut libero elit, ornare id placerat ut, sodales at lectus. Nunc orci turpis, tempus vitae pellentesque id, sodales et sem. Aliquam erat volutpat. Sed sit amet tellus condimentum magna cursus consectetur non sed arcu. Vivamus in consectetur massa. Aliquam vitae nibh nec lacus volutpat sodales. Quisque est arcu, porttitor a pharetra ac, laoreet nec nibh. Nunc ullamcorper adipiscing libero a dictum. Vivamus vulputate egestas arcu non viverra. Phasellus eget libero in ipsum fringilla dapibus. Quisque vehicula rhoncus lorem vel dictum. Sed molestie lorem ac tellus ultrices a varius dui faucibus. Integer quis quam libero. Sed fringilla aliquet lacus, non porttitor erat ultricies eu.
+
+Fusce bibendum euismod porta. Praesent libero nunc, dapibus ac aliquam fringilla, ornare quis eros. Vivamus tincidunt arcu vitae felis varius nec facilisis elit fermentum. In quis quam eget mauris porta faucibus. Fusce nec erat eu lectus pellentesque tempus. Morbi a justo a ante pulvinar ultricies ac tincidunt turpis. Etiam malesuada ultrices nibh quis bibendum. Quisque lacus dui, mattis id lobortis sit amet, fermentum id nisl. Donec fermentum nisi ac metus consectetur semper. Duis condimentum ipsum sit amet arcu adipiscing cursus. Nulla vulputate risus vel elit adipiscing sed pretium mauris venenatis. Vestibulum tincidunt, sapien at dapibus rutrum, urna nisi sollicitudin orci, ut condimentum lectus tellus ut lacus. Sed in nisl et urna placerat vestibulum. Ut fringilla suscipit iaculis. In in eros eget neque suscipit mollis quis ut libero. Pellentesque hendrerit consectetur tellus. Nulla a purus ut dolor volutpat ultrices.
+
+Pellentesque at laoreet libero. Quisque pretium tempus placerat. Proin egestas rhoncus est, eu vehicula justo gravida eu. Sed sem velit, sodales tincidunt gravida vitae, rhoncus vel neque. Proin quis quam ut turpis rhoncus suscipit quis vitae tellus. Phasellus non scelerisque nisl. Vestibulum lectus odio, tristique vitae rhoncus id, dapibus vitae magna. Vestibulum aliquet magna in turpis eleifend in dapibus augue lacinia. Ut risus mi, dictum at mollis eu, feugiat a massa. Nam in velit urna. Aliquam imperdiet porta eros a suscipit. Nullam ante quam, congue ut lacinia vel, laoreet vitae felis. Mauris commodo ultricies lobortis. Donec id varius augue. Vivamus convallis, nulla eget aliquam varius, ligula quam rhoncus augue, vel rutrum diam odio in felis. Nulla facilisi. Duis pretium magna nulla, id pretium mi.
+
+Sed elit odio, semper non semper vel, dapibus eu metus. Ut quis nibh vel leo laoreet egestas vitae id odio. Nunc nec egestas nisl. Vivamus tristique pulvinar leo ullamcorper convallis. Praesent elementum condimentum consectetur. Etiam dui nisi, convallis vel fringilla ac, dignissim vel velit. Fusce magna quam, malesuada at vehicula quis, luctus vel tortor. Vivamus viverra consectetur velit, quis bibendum dolor hendrerit nec. Mauris pretium laoreet eleifend. Donec in ligula a enim fringilla pellentesque vitae sed magna. Integer vitae odio et arcu tempor molestie.
+
+In lacus quam, placerat nec accumsan ut, faucibus eget tellus. Maecenas cursus risus enim. Pellentesque quis lorem orci, id dictum velit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Sed justo arcu, tristique vitae mollis id, dictum non enim. Proin gravida fringilla est eu elementum. Donec ac nulla sapien, et volutpat lectus. Mauris eget quam vel dolor aliquet pretium eu nec dolor. Phasellus auctor nunc ut risus aliquet eu consequat urna rutrum. Integer porta lacus vel metus hendrerit sed fermentum arcu hendrerit. Morbi nibh arcu, tristique ut hendrerit in, rhoncus eget elit.
+
+Morbi tincidunt lectus ut metus aliquam adipiscing. Phasellus eros purus, laoreet non rhoncus nec, aliquet sed justo. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Quisque leo nunc, feugiat ut consequat in, condimentum sit amet urna. Nam et dui sed orci pellentesque feugiat. Aliquam erat volutpat. Aliquam rhoncus sollicitudin orci. Ut blandit dignissim est, a dapibus erat tincidunt vel. Fusce dignissim vehicula lorem non suscipit. Vivamus gravida accumsan est nec consectetur. Etiam congue diam non nisi ornare semper. Maecenas pretium vestibulum velit. Suspendisse at tincidunt quam. In vitae sagittis est. Duis convallis sollicitudin nunc quis posuere. Quisque et augue eget metus commodo pulvinar. Pellentesque et velit eget massa scelerisque sagittis. Aenean tortor magna, auctor sed sodales et, vestibulum sit amet leo. Vestibulum id ligula vel nisi faucibus cursus.
+
+Quisque hendrerit, lorem vel ultricies adipiscing, massa ligula consectetur odio, eu eleifend sem eros varius magna. Mauris metus arcu, hendrerit et fringilla sit amet, vehicula vel leo. Pellentesque eu tellus in nulla sollicitudin tempus. Sed dapibus cursus facilisis. Cras id lectus turpis, et iaculis felis. Nulla dignissim dui non sem posuere posuere. Ut id arcu sit amet quam tristique malesuada. Curabitur ut posuere urna. Vivamus aliquet pretium leo, id sollicitudin nulla tempor eget. Aliquam commodo enim lacus, quis hendrerit lacus. Praesent tortor felis, semper vel aliquet eget, aliquet a ante. Nullam ullamcorper arcu nibh, a facilisis neque. Nunc rutrum posuere sagittis. Donec eleifend aliquam vulputate. Curabitur eget dapibus ipsum. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Cras mollis laoreet nunc, ut suscipit tellus laoreet semper.
+
+Phasellus libero enim, malesuada ut rutrum a, sollicitudin sed elit. Ut suscipit imperdiet nibh, vel gravida mauris fringilla non. Pellentesque sagittis libero id nulla adipiscing vitae iaculis justo consequat. In hac habitasse platea dictumst. Sed venenatis cursus est, et iaculis nisi convallis vel. Etiam non elementum mi. Etiam semper faucibus orci. Nullam tincidunt, lorem commodo sodales placerat, est velit interdum nulla, ut rutrum lectus massa malesuada elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Mauris faucibus odio vel tellus ornare vitae lacinia libero lobortis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Cras porttitor, tortor vel adipiscing ornare, nunc elit lobortis nisl, eu vehicula sapien purus id diam. Sed blandit bibendum facilisis. Pellentesque ornare auctor commodo. In et aliquet magna. Fusce molestie sem eget orci semper sollicitudin. Donec placerat tristique urna, a varius velit sagittis eget. Aliquam vitae rutrum orci. Vivamus ac lobortis dui. Integer ornare lobortis sem vel convallis.
+
+Praesent ornare aliquet arcu, sed lacinia dui convallis quis. Suspendisse nec arcu lectus. Suspendisse potenti. Curabitur scelerisque quam id lacus vehicula ut tristique eros viverra. Mauris et mi ac massa auctor pharetra a eget enim. Sed vel dui sem, ut pulvinar risus. Etiam ac ipsum ipsum, eu venenatis odio. Proin lacinia eleifend risus sed hendrerit. Quisque velit nunc, sodales vitae venenatis vitae, lacinia porta neque. Donec nec vestibulum massa. Duis blandit, sapien in congue pharetra, dolor felis pharetra velit, semper vulputate metus massa ac leo. Etiam dictum neque sed lectus condimentum euismod. Maecenas vel magna ultrices lorem fermentum feugiat. Proin pulvinar ornare libero, aliquet tincidunt neque laoreet vitae. Mauris adipiscing convallis massa, quis pellentesque nulla rhoncus quis. Etiam viverra condimentum commodo. Nulla feugiat molestie ipsum sed pretium. Aenean rhoncus imperdiet urna, quis fringilla justo commodo sed. Aliquam erat volutpat. Morbi sed sem nulla.
+
+Integer scelerisque leo eu massa porta non tincidunt velit dictum. Ut ac ligula ipsum. Phasellus vehicula gravida felis, ac commodo lacus mattis ac. Nam bibendum enim eget diam mattis pharetra. Suspendisse malesuada arcu lacus. Nulla elementum arcu a nulla aliquam eu vestibulum dui pulvinar. Duis a facilisis risus. Nam ac dui nibh, eu porttitor mauris. Integer sollicitudin egestas dui, mollis laoreet mauris molestie ac. Aliquam egestas auctor neque, vitae aliquet dolor tincidunt blandit. Suspendisse laoreet orci at augue dapibus suscipit. In hac habitasse platea dictumst. Phasellus egestas ornare sem ac tincidunt. Suspendisse condimentum sem non augue tincidunt vulputate. Mauris cursus quam vel tortor dapibus eu ultricies mauris viverra. Nulla elit dolor, placerat sit amet facilisis non, fringilla in felis.
+
+Proin consequat diam non quam accumsan faucibus. Sed malesuada, dui quis placerat sagittis, sapien libero molestie libero, a sodales tortor neque non elit. Nulla et sodales ante. Donec tempor, tortor ut congue pulvinar, mi elit tempus risus, a pharetra libero quam a augue. Nulla facilisi. Quisque feugiat tortor a arcu dictum tincidunt. Nulla tincidunt tincidunt tortor, ac suscipit eros bibendum pharetra. Ut dignissim sollicitudin massa, et porttitor ligula vulputate a. Integer condimentum dapibus diam in tempor. Pellentesque molestie fringilla rhoncus. Donec eget laoreet libero. Suspendisse vulputate sapien eu sapien faucibus egestas.
+
+Integer nec erat dui, at eleifend arcu. Cras mauris est, cursus vel euismod sed, suscipit quis lorem. Donec neque sem, laoreet suscipit scelerisque a, volutpat at lectus. Pellentesque non felis erat, sed pulvinar nisl. In congue sollicitudin metus sodales convallis. Fusce venenatis risus ut velit adipiscing vestibulum eu sed augue. Proin metus turpis, sodales at faucibus vel, fringilla sodales ligula. Sed fringilla magna sed diam lacinia adipiscing. Maecenas nibh nibh, consequat vel malesuada sed, vestibulum nec felis. Quisque tempus lobortis dui ut euismod. Nulla facilisi. Ut adipiscing purus quis purus pellentesque eu viverra nunc placerat. Nullam nec dignissim diam. Fusce non dignissim massa. Donec condimentum, orci iaculis vulputate elementum, lectus nunc luctus augue, sit amet suscipit nulla odio at massa. Aliquam eu turpis nec massa feugiat condimentum a ac lectus. Nunc lectus ligula, feugiat vel bibendum et, tempor quis mi. Curabitur molestie, urna quis fringilla consequat, ipsum erat sodales turpis, ut laoreet velit risus vitae libero.
+
+Aliquam erat volutpat. Suspendisse tincidunt accumsan eros in posuere. Morbi non ullamcorper augue. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec dui felis, feugiat semper venenatis vitae, lobortis nec ipsum. In lacinia mauris in massa ornare vel congue ipsum lacinia. Maecenas rhoncus vulputate enim, ut porttitor purus gravida id. Nunc urna ligula, pulvinar eu lacinia nec, scelerisque at nibh. Nam accumsan leo est. Pellentesque congue fermentum nisl ac semper. Sed eget blandit urna. Nullam interdum, risus id hendrerit ultrices, turpis erat vestibulum turpis, quis vehicula mauris sem sit amet est. Mauris et lorem metus, id rutrum nisl. Donec blandit dapibus neque, hendrerit fringilla diam tempus sed. Integer vestibulum, felis quis pulvinar adipiscing, ipsum risus convallis lorem, ac pulvinar lacus nunc sed felis.
+
+Vestibulum vitae tristique orci. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam erat volutpat. Maecenas non eros quis nulla adipiscing rutrum eu at mi. Suspendisse laoreet nulla vitae nunc venenatis vitae adipiscing felis pharetra. Integer viverra vehicula risus, vitae dictum massa tempor a. Sed id leo neque, nec consectetur tellus. Donec fermentum eros vitae magna vulputate ac volutpat ligula suscipit. Curabitur mi orci, molestie tristique bibendum egestas, blandit vel arcu. Sed molestie ullamcorper nisl nec dignissim. Fusce consectetur suscipit mauris at ullamcorper. In massa diam, feugiat in euismod id, tincidunt id libero. Donec adipiscing, tellus id vehicula hendrerit, justo mauris sagittis odio, eget placerat felis ante et enim.
+
+Curabitur posuere fermentum arcu id fringilla. Maecenas et purus ipsum. Maecenas auctor, velit a ullamcorper eleifend, arcu tellus adipiscing turpis, ac malesuada ante lorem eu massa. Aenean libero velit, mollis sed imperdiet in, fringilla eu lectus. Cras ullamcorper lobortis massa non volutpat. Nunc sapien lorem, posuere posuere mattis at, rutrum et dolor. Vivamus dignissim consequat nisi in viverra. Maecenas nec diam quis urna ultrices rutrum feugiat quis urna. Cras sed leo mauris. Vestibulum vitae odio ut nunc posuere lobortis. Ut felis eros, posuere at porttitor sit amet, tincidunt in justo. Nullam turpis magna, egestas ac sodales ut, cursus in eros. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Phasellus bibendum nulla nec augue vehicula laoreet. Morbi vitae nunc ac lorem pharetra pellentesque sit amet ut sapien. Maecenas placerat nunc ultricies felis consequat varius. Duis scelerisque ultrices dolor in commodo. Sed sagittis, enim quis pulvinar volutpat, lectus ante tempor arcu, id fringilla velit risus id nibh. Vivamus ac risus ut lorem dapibus ullamcorper.
+
+Vestibulum blandit lacus mattis eros cursus hendrerit. Quisque nibh arcu, condimentum ut imperdiet eget, interdum sed magna. Cras sem mauris, sagittis at dapibus sit amet, vulputate et felis. Suspendisse gravida tincidunt pellentesque. Fusce aliquet, augue eu porttitor ultricies, diam quam lacinia eros, sed consectetur diam dui ut augue. Phasellus turpis diam, hendrerit faucibus convallis et, rhoncus ac mauris. Vivamus vel turpis id arcu mattis imperdiet a nec enim. Ut ultricies mauris at sapien sollicitudin pharetra. Donec dignissim, metus ut condimentum semper, sapien elit pulvinar nisi, id placerat est orci iaculis lectus. Suspendisse quis sem a libero placerat placerat. Etiam ligula nisi, mattis vitae faucibus nec, malesuada et leo.
+
+Fusce mollis venenatis vehicula. Maecenas sit amet tortor mi, et dapibus leo. In ullamcorper dignissim lorem nec interdum. Sed nisl arcu, aliquet vel facilisis sed, rhoncus at quam. Nunc et posuere arcu. Nam faucibus blandit mi ac lacinia. Nullam ultrices tellus a turpis tincidunt sit amet convallis lacus posuere. Proin vitae orci vel justo tempus consequat sed mollis elit. Integer pellentesque bibendum nunc, et gravida mi auctor et. In vitae arcu eros. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Duis vestibulum nulla bibendum justo fermentum suscipit. Cras lobortis lobortis vulputate. Donec risus nisl, sagittis vel congue vel, adipiscing ac augue. Curabitur at diam quis nisl fermentum luctus non ut nisi. Nulla sed justo urna, non viverra ante. Suspendisse congue, sem non convallis fringilla, est nisl varius nunc, id laoreet nisl neque in elit. Fusce posuere euismod mattis.
+
+Curabitur a massa vitae lectus laoreet eleifend. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc malesuada turpis vehicula velit placerat ut venenatis eros ultricies. Sed nulla ligula, pretium vestibulum sagittis sed, ornare at sem. Fusce ultrices, nibh non rhoncus semper, massa tortor eleifend ante, ac mollis odio arcu ac velit. Aenean quis augue eget lectus sollicitudin accumsan. Curabitur non tortor eros, eu condimentum nulla. Phasellus at sapien ac nibh pretium condimentum. Nulla rhoncus eros vel lorem ultricies dignissim. Donec tempor, risus in mollis pretium, justo urna fermentum mi, id varius ipsum ipsum quis felis. Mauris mollis diam quis lacus laoreet sit amet ultrices felis hendrerit. Nam ac dui nisi. Cras vel risus turpis.
+
+Vivamus eleifend sapien pulvinar libero blandit ullamcorper. Morbi vitae nisl eros, sit amet porttitor erat. Donec varius velit eu tellus feugiat a tempor nunc pellentesque. Morbi sed est libero. Nulla in turpis molestie orci posuere interdum vel vel erat. Curabitur tempus eros id sem scelerisque euismod. Pellentesque varius egestas metus, id cursus massa condimentum non. Donec sagittis ultricies lacus, sit amet iaculis magna bibendum vel. Nulla cursus velit vitae neque ultricies id bibendum dui eleifend. Pellentesque porttitor rutrum interdum. Fusce nulla mi, elementum vitae sagittis id, luctus id urna.
+
+Proin nec ornare magna. Morbi euismod sapien dolor, sed consectetur nisl. In erat dui, tristique ut fringilla sit amet, imperdiet eu sem. Quisque tristique augue sodales nunc malesuada nec varius lectus laoreet. In hac habitasse platea dictumst. Vestibulum a dolor leo, ut interdum lectus. Etiam eu tortor augue, nec tristique metus. Maecenas gravida mauris a ligula vulputate consequat. Suspendisse potenti. Proin id quam magna. Etiam at ipsum augue. Nam tincidunt bibendum mi, ac vehicula tellus pretium eu. Vivamus consectetur risus id enim aliquam et laoreet tortor lacinia. Phasellus interdum dapibus orci eu imperdiet. Nulla egestas, ipsum non rhoncus suscipit, tellus purus porttitor elit, et tempus arcu odio ut justo. Etiam id lorem sed velit sagittis consequat. Duis diam sem, scelerisque in mollis non, tempor eu elit. Nullam molestie blandit dapibus. Nullam interdum laoreet iaculis.
+
+Cras sagittis luctus risus vel placerat. In in justo eget nisi pellentesque varius ut quis mi. Cras eleifend leo ultricies metus auctor accumsan. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Etiam vel velit orci, nec tristique metus. Aliquam viverra leo sit amet leo viverra tincidunt. Praesent pellentesque nisl vehicula leo fringilla blandit. Duis dignissim tincidunt placerat. Quisque ornare pellentesque nisi, a tempor odio laoreet sed. Ut dapibus dolor cursus arcu suscipit in facilisis nunc ornare. Suspendisse consectetur pulvinar tellus eget rutrum. Aenean sagittis egestas diam, sit amet posuere lorem euismod vitae.
+
+Morbi sit amet leo metus, non vehicula ligula. Mauris nec sem sit amet ipsum feugiat sodales id vitae risus. Nullam viverra nisi at erat vestibulum dictum. Morbi et nulla magna. Proin a augue neque, sit amet tristique orci. Suspendisse ornare lorem sodales augue vehicula nec varius turpis hendrerit. Praesent nulla augue, euismod ut pretium id, luctus vel mauris. Morbi eu elit eu augue scelerisque gravida. Sed porta tortor a magna mattis volutpat. Nullam vitae tellus quam, et rhoncus dolor. Nulla ultrices nunc nec mauris mattis in blandit nibh placerat. Nam velit arcu, ultrices a imperdiet eget, pulvinar vel augue. Sed at sapien magna. Nullam accumsan nulla in nulla bibendum molestie sollicitudin lorem faucibus. In nisl tortor, tincidunt ac molestie non, commodo ut dolor. Nullam non nunc enim. Mauris ultrices, dui nec scelerisque hendrerit, erat orci feugiat eros, sed elementum ligula ipsum at velit.
+
+Donec sit amet nisi at est aliquam euismod in eget justo. Ut justo turpis, lobortis quis accumsan sit amet, suscipit non lorem. Duis pulvinar lorem at magna porttitor tristique. Duis tortor mauris, auctor sit amet feugiat in, luctus et risus. Nunc lacinia, arcu id convallis lobortis, nibh sapien scelerisque dui, ac volutpat ante tellus nec odio. In euismod viverra nibh non fringilla. Nunc non nisl risus, at interdum nunc. Phasellus porta tempus aliquam. Cras massa tellus, aliquet a dignissim sed, posuere nec massa. Vestibulum et nisi nulla. Donec ut nisi ante. Sed ac justo eu ligula varius hendrerit a sed justo. Fusce ornare eleifend nisl, at condimentum arcu lobortis ut. Mauris neque felis, viverra ut dignissim dignissim, faucibus et lectus. Aenean laoreet tristique massa id congue.
+
+Mauris accumsan elit quis augue consectetur faucibus. Donec blandit, libero in tincidunt volutpat, purus est gravida eros, ut accumsan orci felis eu purus. Nam est nibh, tincidunt ut faucibus quis, consequat at est. Fusce nec diam ligula. Morbi eu ipsum purus, non semper neque. Maecenas in lacus arcu, vel imperdiet turpis. Curabitur eget nunc velit, in consequat nulla. Donec magna tortor, faucibus vitae hendrerit ac, pretium sed ipsum. Etiam pulvinar cursus enim facilisis consectetur. Maecenas pretium pellentesque nulla, nec viverra risus placerat sed. Nam rutrum justo id augue venenatis ut feugiat risus ultricies. Sed vitae risus nec velit rutrum faucibus at vel orci. Ut feugiat mi eu dui condimentum sit amet suscipit ligula imperdiet. Sed eu bibendum augue. Ut sit amet pulvinar libero. Duis luctus urna tincidunt purus porta euismod.
+
+Suspendisse ullamcorper mi congue lacus volutpat aliquam. Nam pharetra vestibulum enim. Aliquam erat volutpat. Ut convallis consequat neque. Donec commodo vulputate fermentum. Suspendisse potenti. Mauris at nibh ac felis blandit sagittis at sed velit. Morbi fringilla consequat eleifend. Duis lobortis, erat at vulputate posuere, odio diam sodales turpis, ut iaculis tortor leo ac risus. Proin blandit eleifend lacus ac imperdiet. Quisque consequat mollis elementum. Proin hendrerit odio ut orci tempor porta et non enim.
+
+Nullam luctus sagittis molestie. Proin sollicitudin rhoncus condimentum. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Pellentesque mattis, eros non ultrices sodales, libero sem iaculis dui, tempus porttitor libero nibh a eros. Donec et mauris imperdiet arcu semper luctus aliquet dictum turpis. Sed porttitor scelerisque vehicula. Sed eget metus elit, ac accumsan massa. Nam et diam quis purus rhoncus ultrices. Proin dapibus malesuada metus eu elementum. Aliquam luctus lorem non massa ornare non tincidunt quam ultricies. Vestibulum convallis diam id urna vestibulum aliquet. Nulla facilisi. Vestibulum nec egestas turpis. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aenean purus elit, vestibulum quis vehicula vel, auctor vel odio. Mauris eu tellus nunc.
+
+Sed placerat, massa et adipiscing aliquam, massa neque gravida velit, vitae consectetur quam velit non augue. Vivamus et eros metus, et aliquet diam. Aliquam a dui sem. Aenean pretium lacus ut massa faucibus in iaculis sapien pellentesque. Integer odio nibh, condimentum et condimentum vitae, ullamcorper sit amet odio. Fusce vel velit ut diam imperdiet interdum eget at nibh. Suspendisse potenti. Proin vestibulum, ante nec scelerisque volutpat, sapien purus porta ante, at gravida arcu urna consequat dolor. Praesent lorem magna, fringilla quis faucibus id, ultrices sollicitudin risus. Etiam leo lectus, viverra eu laoreet in, sollicitudin eget felis.
+
+Vestibulum tincidunt enim ac diam commodo id placerat erat lacinia. Duis egestas ante venenatis est ullamcorper viverra. Fusce suscipit eleifend velit quis sollicitudin. Donec felis libero, ullamcorper tincidunt luctus eget, fermentum a risus. Phasellus placerat egestas dui, sit amet aliquet arcu tincidunt sit amet. Suspendisse pharetra pellentesque ante sed egestas. Ut sit amet nibh urna, quis tincidunt arcu. Fusce sed sapien in diam rutrum pretium. Duis eu congue diam. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed placerat fermentum ligula eu pretium. Aliquam vitae orci nibh.
+
+Donec justo nisi, congue non ultrices eget, pharetra sit amet nunc. Etiam ullamcorper massa vel mauris semper posuere viverra nisi aliquet. Nullam mi tortor, feugiat sed viverra eu, congue id mi. Vestibulum nec enim sit amet libero dapibus hendrerit eget ac diam. Sed at massa nisl, a placerat tellus. Donec hendrerit tempus scelerisque. Nulla facilisi. Vivamus nec ipsum nisl, ut tempor mi. Integer ornare augue et orci scelerisque sed condimentum lectus scelerisque. Sed mauris lacus, egestas a laoreet facilisis, venenatis at ipsum. Aenean vel ante sed tortor sodales faucibus. Curabitur quis magna quis quam ultrices luctus vitae ac neque. Vivamus sed tortor et purus adipiscing consectetur hendrerit non eros. Vivamus et tristique erat. Maecenas eu quam nibh, sit amet fermentum ante. Fusce adipiscing congue nulla sodales condimentum. Nulla viverra dapibus enim vel rutrum. Mauris sodales varius metus sed gravida.
+
+Suspendisse potenti. Aliquam erat volutpat. Integer et diam purus, et semper erat. Proin ornare, lectus ac congue tincidunt, erat sapien ultrices erat, ac sagittis enim nulla faucibus ligula. In malesuada velit eu velit tincidunt et vestibulum nibh auctor. Integer in felis justo. Nullam in lorem lacus, eget sagittis odio. Quisque congue lorem vitae massa laoreet tempor. Quisque congue magna quis eros cursus vel luctus tellus gravida. Vivamus risus nibh, cursus pulvinar porttitor in, accumsan id orci. Donec hendrerit velit vel sem tristique porta. Vestibulum libero elit, aliquam et blandit nec, convallis id sem.
+
+Cras et odio urna. Sed ut semper metus. In hac habitasse platea dictumst. In hac habitasse platea dictumst. In nec augue eget sapien lacinia porta. Phasellus odio neque, tempus nec commodo at, vehicula ut lacus. Nullam accumsan ultricies placerat. Mauris tincidunt, erat ultrices placerat tincidunt, libero erat tempus nunc, eu consectetur risus est vel mauris. Duis in justo at augue lobortis molestie. Donec ut sem sed orci gravida tristique in at magna. Aliquam pellentesque, justo non mattis egestas, dolor purus aliquam elit, at blandit lectus neque non enim. Fusce sed turpis nisl, quis varius ligula. Proin id enim in neque scelerisque ultrices non id magna. Aenean tortor lectus, viverra eu elementum et, fringilla non arcu. Mauris eget odio eget enim aliquet fringilla. Proin pretium, libero eget dignissim rhoncus, ante sapien accumsan diam, a accumsan nibh neque id dolor. Sed est ante, euismod nec pulvinar sed, faucibus at turpis. Integer fringilla consequat sagittis. Sed elit ipsum, laoreet id viverra in, ornare sed massa.
+
+Praesent eget ligula quis orci condimentum congue ut id dolor. Ut eleifend, dui eu lacinia luctus, magna tellus consectetur nunc, gravida placerat elit risus sed nibh. Integer tristique ornare nibh, eu cursus ante ultrices et. Etiam vehicula pharetra purus quis aliquam. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut at velit mauris. Nulla in ipsum ante, vel sollicitudin quam. Integer a justo ut mi tempor vulputate quis malesuada magna. Ut lacinia ligula nec massa tincidunt at vehicula tortor facilisis. Donec malesuada volutpat adipiscing. Donec iaculis mi at est venenatis consequat. Sed risus sem, accumsan ut dapibus sit amet, laoreet sed mauris.
+
+Phasellus quis euismod sem. Praesent sit amet odio libero. Proin ullamcorper lectus nec arcu pulvinar vitae commodo nunc porttitor. Sed accumsan tellus et nisl dictum vel ornare neque porttitor. Morbi id egestas massa. Nunc condimentum leo vitae nibh pulvinar facilisis. Nunc elit ligula, commodo sed mollis et, ullamcorper et risus. Curabitur risus justo, viverra vel malesuada quis, convallis vitae tortor. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Suspendisse potenti. Phasellus interdum nulla a est hendrerit quis scelerisque ante convallis. Duis suscipit dolor nec lectus rhoncus vestibulum. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum pellentesque pulvinar tellus quis rhoncus.
+
+Integer et pellentesque lorem. Maecenas blandit laoreet justo, non interdum nulla pellentesque sed. Nullam rutrum justo et nibh varius convallis. Praesent rhoncus eleifend ante vitae venenatis. Suspendisse ullamcorper sem at tortor fermentum suscipit sit amet in libero. Aliquam erat volutpat. Suspendisse egestas accumsan tortor, quis egestas lacus vulputate et. Sed vitae turpis in purus volutpat consectetur. Duis imperdiet nisi non augue iaculis dictum. Cras ut ipsum enim, vitae convallis urna. Sed ornare, lorem ac pellentesque iaculis, urna augue egestas arcu, nec mollis dolor tortor non justo. Cras adipiscing, massa vel tristique dignissim, dolor arcu sollicitudin mauris, eget luctus tortor purus in velit. Aenean suscipit erat et dui sagittis elementum. Mauris elementum, lorem et placerat fringilla, ante enim luctus nisl, id posuere dolor urna vel metus. Proin ligula mi, elementum fermentum rhoncus eget, sagittis at eros. Integer fringilla porta varius. Nullam dignissim semper tempus.
+
+Curabitur leo nibh, cursus vitae ultrices id, vulputate sit amet arcu. Aenean vitae lectus turpis, et gravida odio. Praesent mattis sagittis diam, ut fermentum justo euismod et. Nam pharetra, nibh non gravida dignissim, ipsum leo malesuada augue, egestas semper ipsum est sed tortor. Sed quis malesuada elit. In hac habitasse platea dictumst. Mauris ornare aliquet purus, scelerisque gravida orci pretium sed. Nunc sed orci massa, vel molestie lectus. Quisque eget adipiscing odio. Donec vestibulum justo dui, quis malesuada urna. Donec pretium tellus eget erat condimentum ornare. Sed sem urna, rutrum nec elementum ac, ornare vel enim. Fusce pellentesque varius ultricies. Suspendisse vulputate consectetur erat, ut pellentesque felis congue sit amet. Maecenas nisi tellus, fringilla a aliquam sit amet, consectetur eget felis. Maecenas nec urna at lacus posuere hendrerit nec sit amet nisl. Proin quis ligula eu mauris volutpat hendrerit. In interdum bibendum ultricies. Cras sit amet neque at felis sodales scelerisque. Etiam et vulputate sem.
+
+Fusce neque nulla, pharetra sit amet varius eget, aliquam vel tortor. Curabitur a odio velit. Phasellus tempus luctus vulputate. Aenean et libero pulvinar velit aliquet vulputate. In hac habitasse platea dictumst. Etiam at massa urna, eu pulvinar elit. Vestibulum lectus risus, tempor eget cursus ac, fermentum a augue. Maecenas at neque at lacus mollis elementum quis id tellus. Pellentesque ultricies eleifend urna, at blandit augue commodo non. Praesent tincidunt mauris sit amet enim posuere ullamcorper. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. In euismod dignissim ligula, et tincidunt justo hendrerit id.
+
+Donec mi eros, bibendum id suscipit vel, posuere non tortor. Praesent enim urna, posuere at mollis vel, commodo sit amet urna. Vestibulum quis arcu quam. Fusce risus tortor, tempor vel mollis sit amet, ornare et lacus. Quisque sit amet velit justo. Nam vitae erat et nisi ultrices vehicula in quis nunc. Curabitur tristique elit eu nibh fringilla eget adipiscing elit sollicitudin. Nulla nec leo vel enim luctus scelerisque a id augue. Quisque interdum rhoncus elit, consectetur viverra felis fringilla id. Mauris volutpat ultricies nibh quis euismod. Quisque mattis semper purus et aliquet. Praesent vestibulum pulvinar quam, a dictum enim mattis non. Aenean mollis vehicula lorem, vel cursus leo venenatis id. Vivamus dapibus bibendum diam, at ultricies massa interdum et. Nulla lobortis aliquet nisi, non vehicula elit commodo in. Donec commodo, elit vel malesuada suscipit, urna lacus feugiat mi, ac mollis metus enim nec mi. Quisque fermentum, quam quis commodo luctus, quam ligula rhoncus urna, vel molestie ipsum risus ut nulla. Donec mi ligula, pulvinar vel convallis sed, volutpat eu urna. Curabitur a gravida lorem. Quisque sagittis felis ac urna laoreet quis pretium dolor congue.
+
+Proin vehicula diam id odio laoreet in suscipit quam blandit. Nullam sed ante at augue iaculis dignissim et quis ligula. Integer cursus posuere egestas. Duis turpis lacus, bibendum sit amet hendrerit ut, tincidunt vestibulum ante. Maecenas faucibus velit sit amet erat hendrerit et sodales neque scelerisque. Proin sit amet risus pharetra justo tincidunt accumsan ut posuere urna. In massa odio, viverra et pretium at, lobortis non tellus. Aliquam facilisis eleifend facilisis. Maecenas a risus id ante semper ultricies nec nec quam. Curabitur elementum, arcu ut fermentum luctus, nulla lorem accumsan mauris, vitae elementum felis enim ullamcorper lectus. Nulla facilisi. Etiam at turpis sed turpis viverra posuere. Praesent porta mattis mi id feugiat.
+
+Fusce commodo sodales erat quis sodales. Vestibulum dolor felis, interdum semper consectetur eu, mollis eget turpis. Integer accumsan elit sit amet libero dapibus eu viverra tortor porttitor. Ut pulvinar mattis tellus, non pulvinar erat dignissim vitae. Donec sagittis tincidunt quam, in auctor est euismod eu. Aenean feugiat luctus dolor at tincidunt. Aenean a mi sed lacus porta dapibus. Pellentesque ligula est, ultricies vitae tincidunt nec, placerat quis ipsum. Morbi dignissim libero sed nunc mollis feugiat. Vivamus mauris ante, venenatis eget sodales pharetra, vestibulum a ipsum. Vestibulum suscipit tempor sem, sagittis bibendum diam vehicula tempor. Proin at imperdiet dui. Mauris et metus quis mauris tincidunt tempor.
+
+Nunc pulvinar scelerisque magna non lobortis. Pellentesque eget risus mauris, sed suscipit lectus. Nam pharetra magna non urna vehicula rutrum. Duis adipiscing elementum porta. Donec eleifend enim vitae justo ultrices sodales. Nunc facilisis dui nec justo pretium blandit eu in est. Sed turpis lectus, imperdiet ac convallis ut, adipiscing vel mauris. Nulla commodo sollicitudin ante, ut vestibulum leo sollicitudin vitae. Curabitur imperdiet tellus sed tellus tincidunt porta. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Phasellus convallis viverra vulputate. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae;
+
+Sed at mi tortor. Morbi ac sapien nisl. Etiam at sollicitudin nisl. Vestibulum vulputate varius tortor. Donec id magna dolor, non molestie sem. Nullam dolor elit, vulputate ac convallis quis, adipiscing id neque. Aliquam at justo justo, non sagittis nunc. Nullam quis sem libero, non suscipit quam. Duis nec ipsum metus. Praesent nec turpis quam, non malesuada nisi. Praesent ultricies suscipit sollicitudin. Pellentesque at massa nec nisl aliquet dapibus eget et dolor. Vestibulum ullamcorper dui sit amet erat imperdiet varius. In porttitor ultrices purus in imperdiet. Maecenas at erat fringilla tellus ultricies placerat id ut nulla. Aliquam tempus condimentum nunc, in molestie erat laoreet et. Phasellus at erat in massa luctus facilisis quis id purus. Duis dui turpis, gravida in aliquet sit amet, condimentum sed magna. Praesent non tellus in nunc aliquam dictum quis a enim.
+
+Maecenas sed neque velit, ut iaculis neque. Morbi leo arcu, volutpat non sodales ut, volutpat in ligula. Curabitur blandit neque ac arcu lobortis egestas. Nunc id odio ante, in sodales quam. Suspendisse condimentum est et massa bibendum malesuada. Sed fermentum tellus vel lorem dignissim fermentum. Maecenas pretium est sit amet dui congue viverra. Nullam vestibulum accumsan sagittis. Phasellus sit amet justo leo. Pellentesque ut sem lectus, elementum convallis nisl. Pellentesque dictum porttitor nisi, vel feugiat dui interdum nec. Phasellus arcu risus, convallis sit amet sodales in, imperdiet sed lacus. Mauris sed quam sit amet est venenatis sodales. Ut eleifend quam in enim bibendum eu rutrum erat placerat. Nunc faucibus massa ac augue dignissim venenatis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed sed commodo urna. Nam eu urna tortor, eu pharetra magna. Pellentesque tortor elit, molestie sit amet rhoncus eget, aliquam a quam. Phasellus vel nunc et sem pellentesque hendrerit.
+
+Aliquam eu arcu ac felis volutpat scelerisque. Morbi ut dignissim nibh. Nullam convallis, odio a aliquet dignissim, purus leo elementum augue, vitae tristique neque dolor eu nulla. Vivamus sit amet massa a augue lacinia vehicula et vel dolor. Etiam sapien sem, consequat vel vehicula id, pellentesque at augue. Donec est neque, consequat ac convallis in, suscipit sed tortor. Maecenas imperdiet, dolor sit amet congue congue, metus urna suscipit libero, ut congue nisl sapien facilisis est. Nunc eget orci odio, ut aliquam dolor. Fusce nec leo eu enim sollicitudin pharetra in nec sapien. Cras id nisi vitae ipsum semper vehicula. Nunc eu magna ac felis vehicula eleifend vel non felis.
+
+Vestibulum mattis dapibus mauris varius pretium. Nulla facilisi. Morbi quis euismod turpis. Nunc dignissim molestie consectetur. Quisque a mattis ipsum. Ut viverra leo sed odio faucibus sodales. Sed placerat luctus mattis. Aenean auctor iaculis placerat. Pellentesque lorem dui, pharetra id faucibus eget, iaculis egestas diam. Sed a metus tellus, eu aliquam dolor. Pellentesque eget nunc urna. Ut placerat erat in velit ornare luctus.
+
+Proin pharetra enim non lectus fringilla eu varius diam fermentum. Etiam tellus quam, sagittis a pellentesque in, tincidunt non ipsum. Vivamus id faucibus metus. Aliquam sodales venenatis massa nec lacinia. Pellentesque a urna a quam accumsan sollicitudin. Donec feugiat ante a urna aliquam ut laoreet neque molestie. Sed metus erat, hendrerit ornare tempus ut, aliquet eget neque. Morbi rutrum, lectus sit amet dictum luctus, ante tellus molestie nunc, non interdum orci velit a lorem. Suspendisse scelerisque augue eu velit placerat ac iaculis est mattis. Mauris lorem quam, molestie vel tempus eget, tincidunt et est. Etiam sit amet risus ac tellus ultrices porta sit amet a nulla. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae;
+
+Fusce orci leo, tempor sed fermentum ut, rhoncus et erat. Integer a vulputate diam. Pellentesque luctus ornare varius. Quisque ornare tempus lacus quis porta. Integer consequat vestibulum eleifend. Nulla id eros eget odio eleifend vehicula. Duis ultricies ante eget massa vestibulum suscipit. Nunc et dui mi. Aliquam sit amet nunc neque, ut iaculis lorem. Nunc ornare lacinia mauris sed semper. Donec venenatis mollis urna at posuere. Etiam vestibulum dignissim magna nec hendrerit. Nullam interdum suscipit eros, ac sollicitudin mi semper in. Etiam eget feugiat augue. Etiam id imperdiet enim. Proin sed libero id quam dapibus sollicitudin. Cras suscipit dapibus nisi, quis sagittis dui consectetur vitae. Aenean lobortis congue sapien a pulvinar.
+
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas a diam in nulla porta hendrerit. Suspendisse massa ligula, tristique eu molestie quis, congue ut neque. Nullam vitae libero eget justo feugiat gravida ullamcorper at quam. Aenean eget interdum risus. Aliquam erat volutpat. Morbi odio purus, pharetra at cursus eget, tristique sit amet est. Pellentesque et turpis nisi, vitae vulputate dolor. Quisque odio nunc, condimentum ut mollis eget, laoreet pretium metus. Morbi vel est a nulla ultricies laoreet. Morbi ac ultrices eros. Fusce et pharetra leo. Pellentesque volutpat urna orci, sit amet scelerisque urna. Etiam vel orci mauris. Etiam sit amet lectus id massa elementum accumsan. Ut tincidunt ultricies lorem lacinia tempor.
+
+Mauris placerat massa at arcu ultricies sit amet malesuada urna sollicitudin. Pellentesque eleifend rhoncus ullamcorper. Fusce malesuada tincidunt lorem vel ullamcorper. Fusce non quam sapien. In hac habitasse platea dictumst. Praesent facilisis feugiat tempus. Quisque dictum placerat odio, vitae tincidunt lorem tincidunt in. Nam molestie, nisl id tempor auctor, erat nunc gravida nisi, nec vulputate tellus turpis tincidunt mi. Maecenas pretium porttitor lectus, vitae volutpat massa rutrum quis. Mauris ac sapien a arcu interdum condimentum ut quis urna. Mauris ligula neque, malesuada non rutrum et, condimentum ac velit. Sed condimentum neque at eros placerat placerat. Sed porttitor nibh non ipsum vehicula auctor commodo velit lobortis. Aliquam auctor elementum elementum.
+
+Nam aliquam pretium purus vel auctor. Mauris et arcu vel libero adipiscing dictum fermentum sed metus. Mauris dictum elit sed neque pharetra ac facilisis ante volutpat. Ut ut aliquam ligula. Duis vitae tortor nibh. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Morbi varius pulvinar purus id vehicula. Proin sit amet libero at leo varius egestas. Vestibulum posuere porttitor felis, nec lacinia orci rhoncus non. Suspendisse potenti. Maecenas ut tempor felis. Cras at ipsum vitae tellus luctus aliquet. Nulla mauris erat, feugiat et condimentum id, adipiscing sed tellus. Nunc condimentum luctus auctor. In hac habitasse platea dictumst. Cras libero ante, commodo at adipiscing ut, consectetur ut metus. Maecenas eros augue, cursus cursus porta vitae, ullamcorper egestas tortor. Nullam ante felis, viverra in convallis quis, gravida sit amet velit.
+
+Duis consectetur sagittis enim ut dignissim. Integer ut augue at odio vehicula tincidunt. Nam sapien tortor, euismod et suscipit eu, euismod in tellus. Nam ornare orci ac nulla consequat quis semper risus aliquam. Nunc tristique turpis et lacus venenatis a fermentum odio placerat. Morbi condimentum, enim ac tristique rutrum, sem nisi rhoncus orci, id mollis purus justo ut dui. Nulla facilisi. Suspendisse consectetur odio rhoncus ante porttitor ac eleifend metus suscipit. In porttitor tempus massa quis dictum. Integer in orci nibh. Duis nec risus eu nunc sagittis mattis at vitae nunc. Donec sed mi sed ante fermentum posuere nec a est. Quisque vel massa quam. Pellentesque feugiat massa venenatis risus bibendum sit amet dapibus lectus gravida. Mauris nunc lorem, interdum sit amet pulvinar vitae, euismod id mi. Suspendisse turpis elit, lobortis ac fringilla at, aliquet eget libero. Quisque eleifend ullamcorper pharetra. Fusce vitae eros tortor, sed pulvinar neque. Praesent pretium, felis quis adipiscing laoreet, sapien turpis molestie erat, malesuada pretium urna purus id ante. Aliquam ac massa sit amet sapien scelerisque convallis.
+
+Quisque eget libero leo. In nec diam vitae metus varius tempus vitae non purus. Phasellus porttitor, lectus vel aliquam tincidunt, nisl odio volutpat diam, nec ultrices elit quam eget lectus. Sed mollis purus at ipsum porta tempus. Sed rhoncus nisi vel magna rhoncus vitae tristique massa tempor. Etiam metus ligula, hendrerit eu accumsan vitae, euismod ac mi. Suspendisse dui turpis, congue ut fringilla et, laoreet eu enim. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Nunc aliquet lorem quis dolor pharetra tempor. Integer molestie varius laoreet. Curabitur ultrices nibh sit amet elit condimentum sit amet sagittis elit venenatis. Aliquam magna nunc, suscipit sed aliquam in, fringilla vel libero. Nunc eget elit risus. Suspendisse imperdiet, magna vel pulvinar sodales, metus velit accumsan mi, sed venenatis erat dolor eget turpis. Proin lacinia tincidunt semper. Fusce vestibulum sodales massa, a dapibus libero lobortis a. Pellentesque augue mauris, posuere sed faucibus eget, molestie at ante. Proin orci nunc, auctor vel auctor vitae, ultricies sit amet lectus. Integer at nunc eget diam tincidunt suscipit vitae et libero. Donec ac quam tortor, in vestibulum leo.
+
+Praesent laoreet pharetra libero, quis cursus erat tincidunt ac. Vivamus euismod odio vel erat placerat sed vehicula eros rutrum. Sed fermentum, lectus feugiat feugiat dictum, quam sapien commodo tellus, vel ornare urna felis interdum est. Integer condimentum lectus eu nulla lacinia ut porta turpis tempor. Pellentesque quis semper justo. Duis malesuada faucibus condimentum. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Proin condimentum est quis urna pulvinar vehicula. Nam convallis enim non nibh elementum blandit. Curabitur dui urna, aliquam sed posuere eget, porttitor et tortor. Nam vitae velit dignissim quam porttitor congue sed quis massa. Cras sed diam vestibulum nisl pretium rutrum vel at ipsum. In eget euismod sem. Pellentesque vitae sem et augue vehicula pretium sit amet et quam. Proin enim nunc, malesuada vel lobortis non, viverra non leo. Donec eu convallis nibh. Fusce sodales orci nec felis vulputate interdum at in sem. Nulla facilisi.
+
+Nunc posuere orci sed diam fringilla ullamcorper. Vivamus laoreet condimentum purus sit amet consequat. Donec at tristique ipsum. Donec tincidunt, nisi sit amet commodo sagittis, velit diam eleifend nulla, sed faucibus enim arcu eget nisi. Quisque condimentum laoreet ante vel posuere. Aliquam sit amet massa quis orci placerat posuere ut at velit. Ut eu commodo nisi. Pellentesque ornare quam et lorem facilisis nec venenatis ligula dictum. Aliquam vel arcu diam. Nullam ut elit nec lorem eleifend tincidunt vel sed orci. In vulputate semper felis, id tincidunt neque mollis a. Quisque eu nisi non justo vehicula pellentesque. Maecenas nec sem nibh, dictum sagittis nibh. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. In diam purus, commodo eget hendrerit eget, aliquam a sapien. Sed et justo eros. Etiam eget massa urna, non gravida enim. Cras ac ornare ligula.
+
+Suspendisse potenti. Sed non suscipit arcu. Mauris augue elit, porttitor non hendrerit id, egestas a eros. Nunc id orci magna. Fusce massa urna, gravida et porttitor ac, posuere eget nisl. Proin sed.
+Here is the last line there is no return
diff --git a/testing/mozbase/mozdevice/sut_tests/test-files/smalltext.txt b/testing/mozbase/mozdevice/sut_tests/test-files/smalltext.txt
new file mode 100644
index 000000000..9ec831b83
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test-files/smalltext.txt
@@ -0,0 +1 @@
+this is a short text file
diff --git a/testing/mozbase/mozdevice/sut_tests/test-files/test_script.sh b/testing/mozbase/mozdevice/sut_tests/test-files/test_script.sh
new file mode 100644
index 000000000..4f56dae89
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test-files/test_script.sh
@@ -0,0 +1 @@
+echo $THE_ANSWER
diff --git a/testing/mozbase/mozdevice/sut_tests/test_datachannel.py b/testing/mozbase/mozdevice/sut_tests/test_datachannel.py
new file mode 100644
index 000000000..99b71a584
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_datachannel.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+import socket
+from time import strptime
+
+from dmunit import DeviceManagerTestCase, heartbeat_port
+
+
+class DataChannelTestCase(DeviceManagerTestCase):
+
+ runs_on_test_device = False
+
+ def runTest(self):
+ """This tests the heartbeat and the data channel.
+ """
+ ip = self.dm.host
+
+ # Let's connect
+ self._datasock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        # Assume 60 seconds between heartbeats; allow up to twice that before timing out
+ self._datasock.settimeout(float(60 * 2))
+ self._datasock.connect((ip, heartbeat_port))
+ self._connected = True
+
+ # Let's listen
+ numbeats = 0
+ capturedHeader = False
+ while numbeats < 3:
+ data = self._datasock.recv(1024)
+ print data
+ self.assertNotEqual(len(data), 0)
+
+ # Check for the header
+ if not capturedHeader:
+ m = re.match(r"(.*?) trace output", data)
+ self.assertNotEqual(m, None,
+ 'trace output line does not match. The line: ' + str(data))
+ capturedHeader = True
+
+            # Check for a standard heartbeat message
+ m = re.match(r"(.*?) Thump thump - (.*)", data)
+ if m is None:
+ # This isn't an error, it usually means we've obtained some
+ # unexpected data from the device
+ continue
+
+ # Ensure it matches our format
+ mHeartbeatTime = m.group(1)
+ mHeartbeatTime = strptime(mHeartbeatTime, "%Y%m%d-%H:%M:%S")
+ numbeats = numbeats + 1
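
For reference, a minimal sketch of the heartbeat parsing the test above performs. The sample line is invented for illustration; real device output may differ in the payload after "Thump thump - ".

    import re
    from time import strptime

    sample = "20130717-00:51:10 Thump thump - SUTAgentAndroid"
    m = re.match(r"(.*?) Thump thump - (.*)", sample)
    if m is not None:
        # The timestamp prefix must parse with the same format string the test uses.
        beat_time = strptime(m.group(1), "%Y%m%d-%H:%M:%S")
        assert beat_time.tm_year == 2013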
diff --git a/testing/mozbase/mozdevice/sut_tests/test_exec.py b/testing/mozbase/mozdevice/sut_tests/test_exec.py
new file mode 100644
index 000000000..e262bd1ad
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_exec.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import posixpath
+from StringIO import StringIO
+
+from dmunit import DeviceManagerTestCase
+
+
+class ExecTestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """Simple exec test, does not use env vars."""
+ out = StringIO()
+ filename = posixpath.join(self.dm.deviceRoot, 'test_exec_file')
+ # Make sure the file was not already there
+ self.dm.removeFile(filename)
+ self.dm.shell(['dd', 'if=/dev/zero', 'of=%s' % filename, 'bs=1024',
+ 'count=1'], out)
+ # Check that the file has been created
+ self.assertTrue(self.dm.fileExists(filename))
+ # Clean up
+ self.dm.removeFile(filename)
diff --git a/testing/mozbase/mozdevice/sut_tests/test_exec_env.py b/testing/mozbase/mozdevice/sut_tests/test_exec_env.py
new file mode 100644
index 000000000..bf7029e7d
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_exec_env.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+from StringIO import StringIO
+
+from dmunit import DeviceManagerTestCase
+
+
+class ExecEnvTestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """Exec test with env vars."""
+ # Push the file
+ localfile = os.path.join('test-files', 'test_script.sh')
+ remotefile = posixpath.join(self.dm.deviceRoot, 'test_script.sh')
+ self.dm.pushFile(localfile, remotefile)
+
+ # Run the cmd
+ out = StringIO()
+ self.dm.shell(['sh', remotefile], out, env={'THE_ANSWER': 42})
+
+ # Rewind the output file
+ out.seek(0)
+ # Make sure first line is 42
+ line = out.readline()
+ self.assertTrue(int(line) == 42)
+
+ # Clean up
+ self.dm.removeFile(remotefile)
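
The test depends on shell() writing the command's stdout into the file-like object it is given; below is a minimal local sketch of that capture-and-rewind pattern, with a hard-coded "42\n" standing in for the device's output (Python 2 StringIO, matching these tests).

    from StringIO import StringIO

    out = StringIO()
    out.write("42\n")   # stands in for what the device's shell would produce
    out.seek(0)         # rewind before reading, exactly as the test does
    assert int(out.readline()) == 42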
diff --git a/testing/mozbase/mozdevice/sut_tests/test_fileExists.py b/testing/mozbase/mozdevice/sut_tests/test_fileExists.py
new file mode 100644
index 000000000..27822bc95
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_fileExists.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import tempfile
+import posixpath
+
+from dmunit import DeviceManagerTestCase
+
+
+class FileExistsTestCase(DeviceManagerTestCase):
+ """This tests the "fileExists" command.
+ """
+
+ def testOnRoot(self):
+ self.assertTrue(self.dm.fileExists('/'))
+
+ def testOnNonexistent(self):
+ self.assertFalse(self.dm.fileExists('/doesNotExist'))
+
+ def testOnRegularFile(self):
+ remote_path = posixpath.join(self.dm.deviceRoot, 'testFile')
+ self.assertFalse(self.dm.fileExists(remote_path))
+ with tempfile.NamedTemporaryFile() as f:
+ self.dm.pushFile(f.name, remote_path)
+ self.assertTrue(self.dm.fileExists(remote_path))
+ self.dm.removeFile(remote_path)
+
+ def testOnDirectory(self):
+ remote_path = posixpath.join(self.dm.deviceRoot, 'testDir')
+ remote_path_file = posixpath.join(remote_path, 'testFile')
+ self.assertFalse(self.dm.fileExists(remote_path))
+ with tempfile.NamedTemporaryFile() as f:
+ self.dm.pushFile(f.name, remote_path_file)
+ self.assertTrue(self.dm.fileExists(remote_path))
+ self.dm.removeFile(remote_path_file)
+ self.dm.removeDir(remote_path)
diff --git a/testing/mozbase/mozdevice/sut_tests/test_getdir.py b/testing/mozbase/mozdevice/sut_tests/test_getdir.py
new file mode 100644
index 000000000..00ea8c9ae
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_getdir.py
@@ -0,0 +1,51 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+import shutil
+import tempfile
+
+from mozdevice.devicemanager import DMError
+from dmunit import DeviceManagerTestCase
+
+
+class GetDirectoryTestCase(DeviceManagerTestCase):
+
+ def _setUp(self):
+ self.localsrcdir = tempfile.mkdtemp()
+ os.makedirs(os.path.join(self.localsrcdir, 'push1', 'sub.1', 'sub.2'))
+ path = os.path.join(self.localsrcdir,
+ 'push1', 'sub.1', 'sub.2', 'testfile')
+ file(path, 'w').close()
+ os.makedirs(os.path.join(self.localsrcdir, 'push1', 'emptysub'))
+ self.localdestdir = tempfile.mkdtemp()
+ self.expected_filelist = ['emptysub', 'sub.1']
+
+ def tearDown(self):
+ shutil.rmtree(self.localsrcdir)
+ shutil.rmtree(self.localdestdir)
+
+ def runTest(self):
+ """This tests the getDirectory() function.
+ """
+ testroot = posixpath.join(self.dm.deviceRoot, 'infratest')
+ self.dm.removeDir(testroot)
+ self.dm.mkDir(testroot)
+ self.dm.pushDir(
+ os.path.join(self.localsrcdir, 'push1'),
+ posixpath.join(testroot, 'push1'))
+ # pushDir doesn't copy over empty directories, but we want to make sure
+ # that they are retrieved correctly.
+ self.dm.mkDir(posixpath.join(testroot, 'push1', 'emptysub'))
+ self.dm.getDirectory(posixpath.join(testroot, 'push1'),
+ os.path.join(self.localdestdir, 'push1'))
+ self.assertTrue(os.path.exists(
+ os.path.join(self.localdestdir,
+ 'push1', 'sub.1', 'sub.2', 'testfile')))
+ self.assertTrue(os.path.exists(
+ os.path.join(self.localdestdir, 'push1', 'emptysub')))
+ self.assertRaises(DMError, self.dm.getDirectory,
+ '/dummy', os.path.join(self.localdestdir, '/none'))
+ self.assertFalse(os.path.exists(self.localdestdir + '/none'))
diff --git a/testing/mozbase/mozdevice/sut_tests/test_info.py b/testing/mozbase/mozdevice/sut_tests/test_info.py
new file mode 100644
index 000000000..57bb4fce0
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_info.py
@@ -0,0 +1,20 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from dmunit import DeviceManagerTestCase
+
+
+class InfoTestCase(DeviceManagerTestCase):
+
+ runs_on_test_device = False
+
+ def runTest(self):
+ """This tests the "info" command.
+ """
+ cmds = ('os', 'id', 'systime', 'uptime', 'screen', 'memory', 'power')
+ for c in cmds:
+ data = self.dm.getInfo(c)
+ print c + str(data)
+
+        # There is no good way to verify the output; if the call doesn't throw, we're OK.
diff --git a/testing/mozbase/mozdevice/sut_tests/test_prompt.py b/testing/mozbase/mozdevice/sut_tests/test_prompt.py
new file mode 100644
index 000000000..9980ee2ab
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_prompt.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+import socket
+
+from dmunit import DeviceManagerTestCase
+
+
+class PromptTestCase(DeviceManagerTestCase):
+
+ def tearDown(self):
+ if self.sock:
+ self.sock.close()
+
+ def runTest(self):
+ """This tests getting a prompt from the device.
+ """
+ self.sock = None
+ ip = self.dm.host
+ port = self.dm.port
+
+        promptre = re.compile(r'.*\$>\x00')
+ data = ""
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.sock.connect((ip, int(port)))
+ data = self.sock.recv(1024)
+ print data
+ self.assertTrue(promptre.match(data))
diff --git a/testing/mozbase/mozdevice/sut_tests/test_ps.py b/testing/mozbase/mozdevice/sut_tests/test_ps.py
new file mode 100644
index 000000000..b36e61179
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_ps.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from dmunit import DeviceManagerTestCase
+
+
+class ProcessListTestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """This tests getting a process list from the device.
+ """
+ proclist = self.dm.getProcessList()
+
+ # This returns a process list of the form:
+ # [[<procid>, <procname>], [<procid>, <procname>], ...]
+        # On Android the user ID is appended to each entry:
+ # [[<procid>, <procname>, <userid>], ...]
+
+ self.assertNotEqual(len(proclist), 0)
+
+ for item in proclist:
+ self.assertIsInstance(item[0], int)
+ self.assertIsInstance(item[1], str)
+ self.assertGreater(len(item[1]), 0)
+ if len(item) > 2:
+ self.assertIsInstance(item[2], int)
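
A small sketch of unpacking entries of either shape; the sample entries are invented.

    proclist = [[1198, 'com.twitter.android'],          # [<procid>, <procname>]
                [549, 'com.android.launcher', 10029]]   # Android adds a trailing <userid>
    by_name = {}
    for entry in proclist:
        procid, procname = entry[0], entry[1]
        userid = entry[2] if len(entry) > 2 else None
        by_name[procname] = (procid, userid)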
diff --git a/testing/mozbase/mozdevice/sut_tests/test_pull.py b/testing/mozbase/mozdevice/sut_tests/test_pull.py
new file mode 100644
index 000000000..753d8ddd5
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_pull.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import os
+import posixpath
+
+from dmunit import DeviceManagerTestCase
+from mozdevice.devicemanager import DMError
+
+
+class PullTestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """Tests the "pull" command with a binary file.
+ """
+ orig = hashlib.md5()
+ new = hashlib.md5()
+ local_test_file = os.path.join('test-files', 'mybinary.zip')
+ orig.update(file(local_test_file, 'r').read())
+
+ testroot = self.dm.deviceRoot
+ remote_test_file = posixpath.join(testroot, 'mybinary.zip')
+ self.dm.removeFile(remote_test_file)
+ self.dm.pushFile(local_test_file, remote_test_file)
+ new.update(self.dm.pullFile(remote_test_file))
+        # Use hexdigest() instead of digest() since the values are printed
+        # if the assertion fails
+ self.assertEqual(orig.hexdigest(), new.hexdigest())
+
+ remote_missing_file = posixpath.join(testroot, 'doesnotexist')
+ self.dm.removeFile(remote_missing_file) # Just to be sure
+ self.assertRaises(DMError, self.dm.pullFile, remote_missing_file)
diff --git a/testing/mozbase/mozdevice/sut_tests/test_push1.py b/testing/mozbase/mozdevice/sut_tests/test_push1.py
new file mode 100644
index 000000000..f457d6cc5
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_push1.py
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+
+from dmunit import DeviceManagerTestCase
+
+
+class Push1TestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """This tests copying a directory structure to the device.
+ """
+ dvroot = self.dm.deviceRoot
+ dvpath = posixpath.join(dvroot, 'infratest')
+ self.dm.removeDir(dvpath)
+ self.dm.mkDir(dvpath)
+
+ p1 = os.path.join('test-files', 'push1')
+ # Set up local stuff
+ try:
+ os.rmdir(p1)
+ except:
+ pass
+
+ if not os.path.exists(p1):
+ os.makedirs(os.path.join(p1, 'sub.1', 'sub.2'))
+ if not os.path.exists(os.path.join(p1, 'sub.1', 'sub.2', 'testfile')):
+ file(os.path.join(p1, 'sub.1', 'sub.2', 'testfile'), 'w').close()
+
+ self.dm.pushDir(p1, posixpath.join(dvpath, 'push1'))
+
+ self.assertTrue(
+ self.dm.dirExists(posixpath.join(dvpath, 'push1', 'sub.1')))
+ self.assertTrue(self.dm.dirExists(
+ posixpath.join(dvpath, 'push1', 'sub.1', 'sub.2')))
diff --git a/testing/mozbase/mozdevice/sut_tests/test_push2.py b/testing/mozbase/mozdevice/sut_tests/test_push2.py
new file mode 100644
index 000000000..5ccea509e
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_push2.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+
+from dmunit import DeviceManagerTestCase
+
+
+class Push2TestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """This tests copying a directory structure with files to the device.
+ """
+ testroot = posixpath.join(self.dm.deviceRoot, 'infratest')
+ self.dm.removeDir(testroot)
+ self.dm.mkDir(testroot)
+ path = posixpath.join(testroot, 'push2')
+ self.dm.pushDir(os.path.join('test-files', 'push2'), path)
+
+        # Walk the tree and make sure everything is there. Comparing hashes
+        # via validateFile is somewhat circular, but the client-side hashing
+        # it relies on is exercised separately by the cat command tests, so
+        # this check still adds value.
+ self.assertTrue(
+ self.dm.dirExists(posixpath.join(testroot, 'push2', 'sub1')))
+ self.assertTrue(self.dm.validateFile(
+ posixpath.join(testroot, 'push2', 'sub1', 'file1.txt'),
+ os.path.join('test-files', 'push2', 'sub1', 'file1.txt')))
+ self.assertTrue(self.dm.validateFile(
+ posixpath.join(testroot, 'push2', 'sub1', 'sub1.1', 'file2.txt'),
+ os.path.join('test-files', 'push2', 'sub1', 'sub1.1', 'file2.txt')))
+ self.assertTrue(self.dm.validateFile(
+ posixpath.join(testroot, 'push2', 'sub2', 'file3.txt'),
+ os.path.join('test-files', 'push2', 'sub2', 'file3.txt')))
+ self.assertTrue(self.dm.validateFile(
+ posixpath.join(testroot, 'push2', 'file4.bin'),
+ os.path.join('test-files', 'push2', 'file4.bin')))
diff --git a/testing/mozbase/mozdevice/sut_tests/test_pushbinary.py b/testing/mozbase/mozdevice/sut_tests/test_pushbinary.py
new file mode 100644
index 000000000..86809dc1f
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_pushbinary.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+
+from dmunit import DeviceManagerTestCase
+
+
+class PushBinaryTestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """This tests copying a binary file.
+ """
+ testroot = self.dm.deviceRoot
+ self.dm.removeFile(posixpath.join(testroot, 'mybinary.zip'))
+ self.dm.pushFile(os.path.join('test-files', 'mybinary.zip'),
+ posixpath.join(testroot, 'mybinary.zip'))
diff --git a/testing/mozbase/mozdevice/sut_tests/test_pushsmalltext.py b/testing/mozbase/mozdevice/sut_tests/test_pushsmalltext.py
new file mode 100644
index 000000000..174b3b117
--- /dev/null
+++ b/testing/mozbase/mozdevice/sut_tests/test_pushsmalltext.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import posixpath
+
+from dmunit import DeviceManagerTestCase
+
+
+class PushSmallTextTestCase(DeviceManagerTestCase):
+
+ def runTest(self):
+ """This tests copying a small text file.
+ """
+ testroot = self.dm.deviceRoot
+ self.dm.removeFile(posixpath.join(testroot, 'smalltext.txt'))
+ self.dm.pushFile(os.path.join('test-files', 'smalltext.txt'),
+ posixpath.join(testroot, 'smalltext.txt'))
diff --git a/testing/mozbase/mozdevice/tests/droidsut_launch.py b/testing/mozbase/mozdevice/tests/droidsut_launch.py
new file mode 100644
index 000000000..b9872e096
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/droidsut_launch.py
@@ -0,0 +1,36 @@
+from sut import MockAgent
+import mozdevice
+import logging
+import unittest
+
+
+class LaunchTest(unittest.TestCase):
+
+ def test_nouserserial(self):
+ a = MockAgent(self, commands=[("ps",
+ "10029 549 com.android.launcher\n"
+ "10066 1198 com.twitter.android"),
+ ("info sutuserinfo", ""),
+ ("exec am start -W -n "
+ "org.mozilla.fennec/org.mozilla.gecko.BrowserApp -a "
+ "android.intent.action.VIEW",
+ "OK\nreturn code [0]")])
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port, logLevel=logging.DEBUG)
+ d.launchFennec("org.mozilla.fennec")
+ a.wait()
+
+ def test_userserial(self):
+ a = MockAgent(self, commands=[("ps",
+ "10029 549 com.android.launcher\n"
+ "10066 1198 com.twitter.android"),
+ ("info sutuserinfo", "User Serial:0"),
+ ("exec am start --user 0 -W -n "
+ "org.mozilla.fennec/org.mozilla.gecko.BrowserApp -a "
+ "android.intent.action.VIEW",
+ "OK\nreturn code [0]")])
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port, logLevel=logging.DEBUG)
+ d.launchFennec("org.mozilla.fennec")
+ a.wait()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/manifest.ini b/testing/mozbase/mozdevice/tests/manifest.ini
new file mode 100644
index 000000000..63825c85b
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/manifest.ini
@@ -0,0 +1,23 @@
+[DEFAULT]
+skip-if = os == 'win'
+
+[sut_app.py]
+[sut_basic.py]
+[sut_chmod.py]
+[sut_copytree.py]
+[sut_fileExists.py]
+[sut_fileMethods.py]
+[sut_info.py]
+[sut_ip.py]
+[sut_kill.py]
+[sut_list.py]
+[sut_logcat.py]
+[sut_mkdir.py]
+[sut_movetree.py]
+[sut_ps.py]
+[sut_push.py]
+[sut_pull.py]
+[sut_remove.py]
+[sut_time.py]
+[sut_unpackfile.py]
+[droidsut_launch.py]
diff --git a/testing/mozbase/mozdevice/tests/sut.py b/testing/mozbase/mozdevice/tests/sut.py
new file mode 100644
index 000000000..76a5ed313
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import datetime
+import socket
+import time
+
+from threading import Thread
+
+
+class MockAgent(object):
+
+ MAX_WAIT_TIME_SECONDS = 10
+ SOCKET_TIMEOUT_SECONDS = 5
+
+ def __init__(self, tester, start_commands=None, commands=[]):
+ if start_commands:
+ self.commands = start_commands
+ else:
+ self.commands = [("ver", "SUTAgentAndroid Version 1.14")]
+ self.commands = self.commands + commands
+
+ self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self._sock.bind(("127.0.0.1", 0))
+ self._sock.listen(1)
+
+ self.tester = tester
+
+ self.thread = Thread(target=self._serve_thread)
+ self.thread.start()
+
+ self.should_stop = False
+
+ @property
+ def port(self):
+ return self._sock.getsockname()[1]
+
+ def _serve_thread(self):
+ conn = None
+ while self.commands:
+ if not conn:
+ conn, addr = self._sock.accept()
+ conn.settimeout(self.SOCKET_TIMEOUT_SECONDS)
+ conn.send("$>\x00")
+ (command, response) = self.commands.pop(0)
+ data = ''
+ timeout = datetime.datetime.now() + datetime.timedelta(
+ seconds=self.MAX_WAIT_TIME_SECONDS)
+ # The data might come in chunks, particularly if we are expecting
+ # multiple lines, as with push commands.
+ while (len(data) < len(command) and
+ datetime.datetime.now() < timeout):
+ try:
+ data += conn.recv(1024)
+ except socket.timeout:
+ # We handle timeouts in the main loop.
+ pass
+ self.tester.assertEqual(data.strip(), command)
+ # send response and prompt separately to test for bug 789496
+ # FIXME: Improve the mock agent, since overloading the meaning
+ # of 'response' is getting confusing.
+ if response is None: # code for "shut down"
+ conn.shutdown(socket.SHUT_RDWR)
+ conn.close()
+ conn = None
+ elif type(response) is int: # code for "time out"
+ max_timeout = 15.0
+ timeout = 0.0
+ interval = 0.1
+ while not self.should_stop and timeout < max_timeout:
+ time.sleep(interval)
+ timeout += interval
+ if timeout >= max_timeout:
+ raise Exception("Maximum timeout reached! This should not "
+ "happen")
+ return
+ else:
+ # pull is handled specially, as we just pass back the full
+ # command line
+ if "pull" in command:
+ conn.send(response)
+ else:
+ conn.send("%s\n" % response)
+ conn.send("$>\x00")
+
+ def wait(self):
+ self.thread.join()
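
For orientation, a minimal sketch of talking to MockAgent over a raw socket. Assumptions: it runs from this tests directory so "from sut import MockAgent" resolves, and FakeTester is a made-up stand-in for the unittest.TestCase the real tests pass in; normal callers go through mozdevice.DroidSUT instead (Python 2, like the rest of these tests).

    import socket
    from sut import MockAgent

    class FakeTester(object):
        # Minimal stand-in for the TestCase that MockAgent calls assertEqual on.
        def assertEqual(self, a, b):
            assert a == b, (a, b)

    agent = MockAgent(FakeTester(), commands=[("info id", "11:22:33:44:55:66")])
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("127.0.0.1", agent.port))
    sock.recv(1024)        # initial "$>\x00" prompt
    sock.send("ver")       # the default start command registered above
    sock.recv(1024)        # version banner, followed (possibly separately) by a prompt
    sock.send("info id")
    sock.recv(1024)        # the canned response plus another prompt
    agent.wait()
    sock.close()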
diff --git a/testing/mozbase/mozdevice/tests/sut_app.py b/testing/mozbase/mozdevice/tests/sut_app.py
new file mode 100644
index 000000000..0a5d996ae
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_app.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestApp(unittest.TestCase):
+
+ def test_getAppRoot(self):
+ command = [("getapproot org.mozilla.firefox",
+ "/data/data/org.mozilla.firefox")]
+
+ m = MockAgent(self, commands=command)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+
+ self.assertEqual(command[0][1], d.getAppRoot('org.mozilla.firefox'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_basic.py b/testing/mozbase/mozdevice/tests/sut_basic.py
new file mode 100644
index 000000000..666d4915c
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_basic.py
@@ -0,0 +1,73 @@
+from sut import MockAgent
+import mozdevice
+import logging
+import unittest
+
+
+class BasicTest(unittest.TestCase):
+
+ def test_init(self):
+ """Tests DeviceManager initialization."""
+ a = MockAgent(self)
+
+ mozdevice.DroidSUT("127.0.0.1", port=a.port, logLevel=logging.DEBUG)
+ # all testing done in device's constructor
+ a.wait()
+
+ def test_init_err(self):
+ """Tests error handling during initialization."""
+ a = MockAgent(self, start_commands=[("ver", "##AGENT-WARNING## No version")])
+ self.assertRaises(mozdevice.DMError,
+ lambda: mozdevice.DroidSUT("127.0.0.1",
+ port=a.port,
+ logLevel=logging.DEBUG))
+ a.wait()
+
+ def test_timeout_normal(self):
+ """Tests DeviceManager timeout, normal case."""
+ a = MockAgent(self, commands=[("isdir /mnt/sdcard/tests", "TRUE"),
+ ("cd /mnt/sdcard/tests", ""),
+ ("ls", "test.txt"),
+ ("rm /mnt/sdcard/tests/test.txt",
+ "Removed the file")])
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port, logLevel=logging.DEBUG)
+ ret = d.removeFile('/mnt/sdcard/tests/test.txt')
+ self.assertEqual(ret, None) # if we didn't throw an exception, we're ok
+ a.wait()
+
+ def test_timeout_timeout(self):
+ """Tests DeviceManager timeout, timeout case."""
+ a = MockAgent(self, commands=[("isdir /mnt/sdcard/tests", "TRUE"),
+ ("cd /mnt/sdcard/tests", ""),
+ ("ls", "test.txt"),
+ ("rm /mnt/sdcard/tests/test.txt", 0)])
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port, logLevel=logging.DEBUG)
+ d.default_timeout = 1
+ exceptionThrown = False
+ try:
+ d.removeFile('/mnt/sdcard/tests/test.txt')
+ except mozdevice.DMError:
+ exceptionThrown = True
+ self.assertEqual(exceptionThrown, True)
+ a.should_stop = True
+ a.wait()
+
+ def test_shell(self):
+ """Tests shell command"""
+ for cmd in [("exec foobar", False), ("execsu foobar", True)]:
+ for retcode in [1, 2]:
+ a = MockAgent(self, commands=[(cmd[0],
+ "\nreturn code [%s]" % retcode)])
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ exceptionThrown = False
+ try:
+ d.shellCheckOutput(["foobar"], root=cmd[1])
+ except mozdevice.DMError:
+ exceptionThrown = True
+ expectedException = (retcode != 0)
+ self.assertEqual(exceptionThrown, expectedException)
+
+ a.wait()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_chmod.py b/testing/mozbase/mozdevice/tests/sut_chmod.py
new file mode 100644
index 000000000..404330c03
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_chmod.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestChmod(unittest.TestCase):
+
+ def test_chmod(self):
+
+ command = [('chmod /mnt/sdcard/test',
+ 'Changing permissions for /storage/emulated/legacy/Test\n'
+ ' <empty>\n'
+ 'chmod /storage/emulated/legacy/Test ok\n')]
+ m = MockAgent(self, commands=command)
+ d = mozdevice.DroidSUT('127.0.0.1', port=m.port, logLevel=logging.DEBUG)
+
+ self.assertEqual(None, d.chmodDir('/mnt/sdcard/test'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_copytree.py b/testing/mozbase/mozdevice/tests/sut_copytree.py
new file mode 100644
index 000000000..ec22828d0
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_copytree.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class CopyTreeTest(unittest.TestCase):
+
+ def test_copyFile(self):
+ commands = [('dd if=/mnt/sdcard/tests/test.txt of=/mnt/sdcard/tests/test2.txt', ''),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'test.txt\ntest2.txt')]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+
+ self.assertEqual(None, d.copyTree('/mnt/sdcard/tests/test.txt',
+ '/mnt/sdcard/tests/test2.txt'))
+ expected = (commands[3][1].strip()).split('\n')
+ self.assertEqual(expected, d.listFiles('/mnt/sdcard/tests'))
+
+ def test_copyDir(self):
+ commands = [('dd if=/mnt/sdcard/tests/foo of=/mnt/sdcard/tests/bar', ''),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'foo\nbar')]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port,
+ logLevel=logging.DEBUG)
+
+ self.assertEqual(None, d.copyTree('/mnt/sdcard/tests/foo',
+ '/mnt/sdcard/tests/bar'))
+ expected = (commands[3][1].strip()).split('\n')
+ self.assertEqual(expected, d.listFiles('/mnt/sdcard/tests'))
+
+ def test_copyNonEmptyDir(self):
+ commands = [('isdir /mnt/sdcard/tests/foo/bar', 'TRUE'),
+ ('dd if=/mnt/sdcard/tests/foo of=/mnt/sdcard/tests/foo2', ''),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'foo\nfoo2'),
+ ('isdir /mnt/sdcard/tests/foo2', 'TRUE'),
+ ('cd /mnt/sdcard/tests/foo2', ''),
+ ('ls', 'bar')]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port,
+ logLevel=logging.DEBUG)
+
+ self.assertTrue(d.dirExists('/mnt/sdcard/tests/foo/bar'))
+ self.assertEqual(None, d.copyTree('/mnt/sdcard/tests/foo',
+ '/mnt/sdcard/tests/foo2'))
+ expected = (commands[4][1].strip()).split('\n')
+ self.assertEqual(expected, d.listFiles('/mnt/sdcard/tests'))
+ self.assertTrue(d.fileExists('/mnt/sdcard/tests/foo2/bar'))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_fileExists.py b/testing/mozbase/mozdevice/tests/sut_fileExists.py
new file mode 100644
index 000000000..702fd2de3
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_fileExists.py
@@ -0,0 +1,29 @@
+from sut import MockAgent
+import mozdevice
+import unittest
+
+
+class FileExistsTest(unittest.TestCase):
+
+ commands = [('isdir /', 'TRUE'),
+ ('cd /', ''),
+ ('ls', 'init')]
+
+ def test_onRoot(self):
+ root_commands = [('isdir /', 'TRUE')]
+ a = MockAgent(self, commands=root_commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ self.assertTrue(d.fileExists('/'))
+
+ def test_onNonexistent(self):
+ a = MockAgent(self, commands=self.commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ self.assertFalse(d.fileExists('/doesNotExist'))
+
+ def test_onRegularFile(self):
+ a = MockAgent(self, commands=self.commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ self.assertTrue(d.fileExists('/init'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_fileMethods.py b/testing/mozbase/mozdevice/tests/sut_fileMethods.py
new file mode 100644
index 000000000..142950a81
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_fileMethods.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+import hashlib
+import mozdevice
+import logging
+import shutil
+import tempfile
+import unittest
+from sut import MockAgent
+
+
+class TestFileMethods(unittest.TestCase):
+ """ Class to test misc file methods """
+
+ content = "What is the answer to the life, universe and everything? 42"
+ h = hashlib.md5()
+ h.update(content)
+ temp_hash = h.hexdigest()
+
+ def test_validateFile(self):
+
+ with tempfile.NamedTemporaryFile() as f:
+ f.write(self.content)
+ f.flush()
+
+ # Test Valid Hashes
+ commands_valid = [("hash /sdcard/test/file", self.temp_hash)]
+
+ m = MockAgent(self, commands=commands_valid)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertTrue(d.validateFile('/sdcard/test/file', f.name))
+
+ # Test invalid hashes
+ commands_invalid = [("hash /sdcard/test/file", "0this0hash0is0completely0invalid")]
+
+ m = MockAgent(self, commands=commands_invalid)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertFalse(d.validateFile('/sdcard/test/file', f.name))
+
+ def test_getFile(self):
+
+ fname = "/mnt/sdcard/file"
+ commands = [("pull %s" % fname, "%s,%s\n%s" % (fname, len(self.content), self.content)),
+ ("hash %s" % fname, self.temp_hash)]
+
+ with tempfile.NamedTemporaryFile() as f:
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ # No error means success
+ self.assertEqual(None, d.getFile(fname, f.name))
+
+ def test_getDirectory(self):
+
+ fname = "/mnt/sdcard/file"
+ commands = [("isdir /mnt/sdcard", "TRUE"),
+ ("isdir /mnt/sdcard", "TRUE"),
+ ("cd /mnt/sdcard", ""),
+ ("ls", "file"),
+ ("isdir %s" % fname, "FALSE"),
+ ("pull %s" % fname, "%s,%s\n%s" % (fname, len(self.content), self.content)),
+ ("hash %s" % fname, self.temp_hash)]
+
+ tmpdir = tempfile.mkdtemp()
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual(None, d.getDirectory("/mnt/sdcard", tmpdir))
+
+ # Cleanup
+ shutil.rmtree(tmpdir)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_info.py b/testing/mozbase/mozdevice/tests/sut_info.py
new file mode 100644
index 000000000..93f3d4258
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_info.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import re
+import unittest
+from sut import MockAgent
+
+
+class TestGetInfo(unittest.TestCase):
+
+ commands = {'os': ('info os', 'JDQ39'),
+ 'id': ('info id', '11:22:33:44:55:66'),
+ 'uptime': ('info uptime', '0 days 0 hours 7 minutes 0 seconds 0 ms'),
+ 'uptimemillis': ('info uptimemillis', '666'),
+ 'systime': ('info systime', '2013/04/2 12:42:00:007'),
+ 'screen': ('info screen', 'X:768 Y:1184'),
+ 'rotation': ('info rotation', 'ROTATION:0'),
+ 'memory': ('info memory', 'PA:1351032832, FREE: 878645248'),
+ 'process': ('info process', '1000 527 system\n'
+ '10091 3443 org.mozilla.firefox\n'
+ '10112 3137 com.mozilla.SUTAgentAndroid\n'
+ '10035 807 com.android.launcher'),
+ 'disk': ('info disk', '/data: 6084923392 total, 980922368 available\n'
+ '/system: 867999744 total, 332333056 available\n'
+ '/mnt/sdcard: 6084923392 total, 980922368 available'),
+ 'power': ('info power', 'Power status:\n'
+ ' AC power OFFLINE\n'
+ ' Battery charge LOW DISCHARGING\n'
+ ' Remaining charge: 20%\n'
+ ' Battery Temperature: 25.2 (c)'),
+ 'sutuserinfo': ('info sutuserinfo', 'User Serial:0'),
+ 'temperature': ('info temperature', 'Temperature: unknown')
+ }
+
+ def test_getInfo(self):
+
+ for directive in self.commands.keys():
+ m = MockAgent(self, commands=[self.commands[directive]])
+ d = mozdevice.DroidSUT('127.0.0.1', port=m.port, logLevel=logging.DEBUG)
+
+ expected = re.sub(r'\ +', ' ', self.commands[directive][1]).split('\n')
+ # Account for slightly different return format for 'process'
+            if directive == 'process':
+ expected = [[x] for x in expected]
+
+ self.assertEqual(d.getInfo(directive=directive)[directive], expected)
+
+if __name__ == '__main__':
+ unittest.main()
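
A quick illustration of the normalization applied above: runs of spaces are collapsed and the output is split on newlines. The sample text is an invented excerpt shaped like the 'power' entry.

    import re

    raw = ('Power status:\n'
           '  AC power OFFLINE\n'
           '  Battery charge LOW DISCHARGING')
    lines = re.sub(r'\ +', ' ', raw).split('\n')
    # lines == ['Power status:', ' AC power OFFLINE', ' Battery charge LOW DISCHARGING']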
diff --git a/testing/mozbase/mozdevice/tests/sut_ip.py b/testing/mozbase/mozdevice/tests/sut_ip.py
new file mode 100644
index 000000000..31428a624
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_ip.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestGetIP(unittest.TestCase):
+ """ class to test IP methods """
+
+ commands = [('exec ifconfig eth0', 'eth0: ip 192.168.0.1 '
+ 'mask 255.255.255.0 flags [up broadcast running multicast]\n'
+ 'return code [0]'),
+ ('exec ifconfig wlan0', 'wlan0: ip 10.1.39.126\n'
+ 'mask 255.255.0.0 flags [up broadcast running multicast]\n'
+ 'return code [0]'),
+ ('exec ifconfig fake0', '##AGENT-WARNING## [ifconfig] '
+ 'command with arg(s) = [fake0] is currently not implemented.')
+ ]
+
+ def test_getIP_eth0(self):
+ m = MockAgent(self, commands=[self.commands[0]])
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual('192.168.0.1', d.getIP(interfaces=['eth0']))
+
+ def test_getIP_wlan0(self):
+ m = MockAgent(self, commands=[self.commands[1]])
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual('10.1.39.126', d.getIP(interfaces=['wlan0']))
+
+ def test_getIP_error(self):
+ m = MockAgent(self, commands=[self.commands[2]])
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertRaises(mozdevice.DMError, d.getIP, interfaces=['fake0'])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_kill.py b/testing/mozbase/mozdevice/tests/sut_kill.py
new file mode 100644
index 000000000..fea2f57e0
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_kill.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestKill(unittest.TestCase):
+
+ def test_killprocess(self):
+ commands = [("ps", "1000 1486 com.android.settings\n"
+ "10016 420 com.android.location.fused\n"
+ "10023 335 com.android.systemui\n"),
+ ("kill com.android.settings",
+ "Successfully killed com.android.settings\n")]
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ # No error raised means success
+ self.assertEqual(None, d.killProcess("com.android.settings"))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_list.py b/testing/mozbase/mozdevice/tests/sut_list.py
new file mode 100644
index 000000000..a319fd725
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_list.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestListFiles(unittest.TestCase):
+ commands = [("isdir /mnt/sdcard", "TRUE"),
+ ("cd /mnt/sdcard", ""),
+ ("ls", "Android\nMusic\nPodcasts\nRingtones\nAlarms\n"
+ "Notifications\nPictures\nMovies\nDownload\nDCIM\n")]
+
+ def test_listFiles(self):
+ m = MockAgent(self, commands=self.commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+
+ expected = (self.commands[2][1].strip()).split("\n")
+ self.assertEqual(expected, d.listFiles("/mnt/sdcard"))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_logcat.py b/testing/mozbase/mozdevice/tests/sut_logcat.py
new file mode 100644
index 000000000..b4c1a742d
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_logcat.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestLogCat(unittest.TestCase):
+ """ Class to test methods associated with logcat """
+
+ def test_getLogcat(self):
+
+ logcat_output = (
+ "07-17 00:51:10.377 I/SUTAgentAndroid( 2933): onCreate\r\n"
+ "07-17 00:51:10.457 D/dalvikvm( 2933): GC_CONCURRENT freed 351K, 17% free 2523K/3008K, paused 5ms+2ms, total 38ms\r\n" # noqa
+ "07-17 00:51:10.497 I/SUTAgentAndroid( 2933): Caught exception creating file in /data/local/tmp: open failed: EACCES (Permission denied)\r\n" # noqa
+ "07-17 00:51:10.507 E/SUTAgentAndroid( 2933): ERROR: Cannot access world writeable test root\r\n" # noqa
+ "07-17 00:51:10.547 D/GeckoHealthRec( 3253): Initializing profile cache.\r\n"
+ "07-17 00:51:10.607 D/GeckoHealthRec( 3253): Looking for /data/data/org.mozilla.fennec/files/mozilla/c09kfhne.default/times.json\r\n" # noqa
+ "07-17 00:51:10.637 D/GeckoHealthRec( 3253): Using times.json for profile creation time.\r\n" # noqa
+ "07-17 00:51:10.707 D/GeckoHealthRec( 3253): Incorporating environment: times.json profile creation = 1374026758604\r\n" # noqa
+ "07-17 00:51:10.507 D/GeckoHealthRec( 3253): Requested prefs.\r\n"
+ "07-17 06:50:54.907 I/SUTAgentAndroid( 3876): \r\n"
+ "07-17 06:50:54.907 I/SUTAgentAndroid( 3876): Total Private Dirty Memory 3176 kb\r\n" # noqa
+ "07-17 06:50:54.907 I/SUTAgentAndroid( 3876): Total Proportional Set Size Memory 5679 kb\r\n" # noqa
+ "07-17 06:50:54.907 I/SUTAgentAndroid( 3876): Total Shared Dirty Memory 9216 kb\r\n" # noqa
+ "07-17 06:55:21.627 I/SUTAgentAndroid( 3876): 127.0.0.1 : execsu /system/bin/logcat -v time -d dalvikvm:I " # noqa
+ "ConnectivityService:S WifiMonitor:S WifiStateTracker:S wpa_supplicant:S NetworkStateTracker:S\r\n" # noqa
+ "07-17 06:55:21.827 I/dalvikvm-heap( 3876): Grow heap (frag case) to 3.019MB for 102496-byte allocation\r\n" # noqa
+ "return code [0]")
+
+ inp = ("execsu /system/bin/logcat -v time -d "
+ "dalvikvm:I ConnectivityService:S WifiMonitor:S "
+ "WifiStateTracker:S wpa_supplicant:S NetworkStateTracker:S")
+
+ commands = [(inp, logcat_output)]
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual(logcat_output[:-17].replace('\r\n', '\n').splitlines(True), d.getLogcat())
+
+ def test_recordLogcat(self):
+
+ commands = [("execsu /system/bin/logcat -c", "return code [0]")]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ # No error raised means success
+ self.assertEqual(None, d.recordLogcat())
+
+if __name__ == '__main__':
+ unittest.main()
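
The slice logcat_output[:-17] above removes the agent's trailing status marker, which getLogcat() is not expected to return; a quick check of that arithmetic:

    trailer = "\r\nreturn code [0]"
    assert len(trailer) == 17   # exactly the width that [:-17] strips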
diff --git a/testing/mozbase/mozdevice/tests/sut_mkdir.py b/testing/mozbase/mozdevice/tests/sut_mkdir.py
new file mode 100644
index 000000000..bacaae324
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_mkdir.py
@@ -0,0 +1,78 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class MkDirsTest(unittest.TestCase):
+
+ def test_mkdirs(self):
+ subTests = [{'cmds': [('isdir /mnt/sdcard/baz/boop', 'FALSE'),
+ ('info os', 'android'),
+ ('isdir /mnt', 'TRUE'),
+ ('isdir /mnt/sdcard', 'TRUE'),
+ ('isdir /mnt/sdcard/baz', 'FALSE'),
+ ('mkdr /mnt/sdcard/baz',
+ '/mnt/sdcard/baz successfully created'),
+ ('isdir /mnt/sdcard/baz/boop', 'FALSE'),
+ ('mkdr /mnt/sdcard/baz/boop',
+ '/mnt/sdcard/baz/boop successfully created')],
+ 'expectException': False},
+ {'cmds': [('isdir /mnt/sdcard/baz/boop', 'FALSE'),
+ ('info os', 'android'),
+ ('isdir /mnt', 'TRUE'),
+ ('isdir /mnt/sdcard', 'TRUE'),
+ ('isdir /mnt/sdcard/baz', 'FALSE'),
+ ('mkdr /mnt/sdcard/baz',
+ "##AGENT-WARNING## "
+ "Could not create the directory /mnt/sdcard/baz")],
+ 'expectException': True},
+ ]
+ for subTest in subTests:
+ a = MockAgent(self, commands=subTest['cmds'])
+
+ exceptionThrown = False
+ try:
+ d = mozdevice.DroidSUT('127.0.0.1', port=a.port,
+ logLevel=logging.DEBUG)
+ d.mkDirs('/mnt/sdcard/baz/boop/bip')
+ except mozdevice.DMError:
+ exceptionThrown = True
+ self.assertEqual(exceptionThrown, subTest['expectException'])
+
+ a.wait()
+
+ def test_repeated_path_part(self):
+ """
+ Ensure that all dirs are created when last path part also found
+ earlier in the path (bug 826492).
+ """
+
+ cmds = [('isdir /mnt/sdcard/foo', 'FALSE'),
+ ('info os', 'android'),
+ ('isdir /mnt', 'TRUE'),
+ ('isdir /mnt/sdcard', 'TRUE'),
+ ('isdir /mnt/sdcard/foo', 'FALSE'),
+ ('mkdr /mnt/sdcard/foo',
+ '/mnt/sdcard/foo successfully created')]
+ a = MockAgent(self, commands=cmds)
+ d = mozdevice.DroidSUT('127.0.0.1', port=a.port,
+ logLevel=logging.DEBUG)
+ d.mkDirs('/mnt/sdcard/foo/foo')
+ a.wait()
+
+ def test_mkdirs_on_root(self):
+ cmds = [('isdir /', 'TRUE')]
+ a = MockAgent(self, commands=cmds)
+ d = mozdevice.DroidSUT('127.0.0.1', port=a.port,
+ logLevel=logging.DEBUG)
+ d.mkDirs('/foo')
+
+ a.wait()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_movetree.py b/testing/mozbase/mozdevice/tests/sut_movetree.py
new file mode 100644
index 000000000..0e106577c
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_movetree.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class MoveTreeTest(unittest.TestCase):
+
+ def test_moveFile(self):
+ commands = [('mv /mnt/sdcard/tests/test.txt /mnt/sdcard/tests/test1.txt', ''),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'test1.txt'),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'test1.txt')]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual(None, d.moveTree('/mnt/sdcard/tests/test.txt',
+ '/mnt/sdcard/tests/test1.txt'))
+ self.assertFalse(d.fileExists('/mnt/sdcard/tests/test.txt'))
+ self.assertTrue(d.fileExists('/mnt/sdcard/tests/test1.txt'))
+
+ def test_moveDir(self):
+ commands = [("mv /mnt/sdcard/tests/foo /mnt/sdcard/tests/bar", ""),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'bar')]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual(None, d.moveTree('/mnt/sdcard/tests/foo',
+ '/mnt/sdcard/tests/bar'))
+ self.assertTrue(d.fileExists('/mnt/sdcard/tests/bar'))
+
+ def test_moveNonEmptyDir(self):
+ commands = [('isdir /mnt/sdcard/tests/foo/bar', 'TRUE'),
+ ('mv /mnt/sdcard/tests/foo /mnt/sdcard/tests/foo2', ''),
+ ('isdir /mnt/sdcard/tests', 'TRUE'),
+ ('cd /mnt/sdcard/tests', ''),
+ ('ls', 'foo2'),
+ ('isdir /mnt/sdcard/tests/foo2', 'TRUE'),
+ ('cd /mnt/sdcard/tests/foo2', ''),
+ ('ls', 'bar')]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port,
+ logLevel=logging.DEBUG)
+
+ self.assertTrue(d.dirExists('/mnt/sdcard/tests/foo/bar'))
+ self.assertEqual(None, d.moveTree('/mnt/sdcard/tests/foo',
+ '/mnt/sdcard/tests/foo2'))
+ self.assertTrue(d.fileExists('/mnt/sdcard/tests/foo2'))
+ self.assertTrue(d.fileExists('/mnt/sdcard/tests/foo2/bar'))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_ps.py b/testing/mozbase/mozdevice/tests/sut_ps.py
new file mode 100644
index 000000000..03f431c1d
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_ps.py
@@ -0,0 +1,50 @@
+from sut import MockAgent
+import mozdevice
+import unittest
+
+
+class PsTest(unittest.TestCase):
+
+ pscommands = [('ps',
+ "10029 549 com.android.launcher\n"
+ "10066 1198 com.twitter.android")]
+
+ bad_pscommands = [('ps',
+ "abcdef 549 com.android.launcher\n"
+ "10066 1198 com.twitter.android")]
+
+ def test_processList(self):
+ a = MockAgent(self,
+ commands=self.pscommands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ pslist = d.getProcessList()
+ self.assertEqual(len(pslist), 2)
+ self.assertEqual(pslist[0], [549, 'com.android.launcher', 10029])
+ self.assertEqual(pslist[1], [1198, 'com.twitter.android', 10066])
+
+ a.wait()
+
+ def test_badProcessList(self):
+ a = MockAgent(self,
+ commands=self.bad_pscommands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ exceptionTriggered = False
+ try:
+ d.getProcessList()
+ except mozdevice.DMError:
+ exceptionTriggered = True
+
+ self.assertTrue(exceptionTriggered)
+
+ a.wait()
+
+ def test_processExist(self):
+ for i in [('com.android.launcher', 549),
+ ('com.fennec.android', None)]:
+ a = MockAgent(self, commands=self.pscommands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ self.assertEqual(d.processExist(i[0]), i[1])
+ a.wait()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_pull.py b/testing/mozbase/mozdevice/tests/sut_pull.py
new file mode 100644
index 000000000..c9fcae42a
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_pull.py
@@ -0,0 +1,47 @@
+from sut import MockAgent
+import mozdevice
+import logging
+import unittest
+
+
+class PullTest(unittest.TestCase):
+
+ def test_pull_success(self):
+ for count in [1, 4, 1024, 2048]:
+ cheeseburgers = ""
+ for i in range(count):
+ cheeseburgers += "cheeseburgers"
+
+            # pullFile is kind of gross; make sure we can still execute commands after it's done
+ remoteName = "/mnt/sdcard/cheeseburgers"
+ a = MockAgent(self, commands=[("pull %s" % remoteName,
+ "%s,%s\n%s" % (remoteName,
+ len(cheeseburgers),
+ cheeseburgers)),
+ ("isdir /mnt/sdcard", "TRUE")])
+
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port,
+ logLevel=logging.DEBUG)
+ pulledData = d.pullFile("/mnt/sdcard/cheeseburgers")
+ self.assertEqual(pulledData, cheeseburgers)
+ d.dirExists('/mnt/sdcard')
+
+ def test_pull_failure(self):
+
+        # this test simulates receiving only a few bytes of what we expect
+        # to be a larger file
+ remoteName = "/mnt/sdcard/cheeseburgers"
+ a = MockAgent(self, commands=[("pull %s" % remoteName,
+ "%s,15\n%s" % (remoteName,
+ "cheeseburgh"))])
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port,
+ logLevel=logging.DEBUG)
+ exceptionThrown = False
+ try:
+ d.pullFile("/mnt/sdcard/cheeseburgers")
+ except mozdevice.DMError:
+ exceptionThrown = True
+ self.assertTrue(exceptionThrown)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_push.py b/testing/mozbase/mozdevice/tests/sut_push.py
new file mode 100644
index 000000000..023d5315c
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_push.py
@@ -0,0 +1,88 @@
+from sut import MockAgent
+import mozfile
+import mozdevice
+import logging
+import unittest
+import hashlib
+import tempfile
+import os
+
+
+class PushTest(unittest.TestCase):
+
+ def test_push(self):
+ pushfile = "1234ABCD"
+ mdsum = hashlib.md5()
+ mdsum.update(pushfile)
+ expectedResponse = mdsum.hexdigest()
+
+ # (good response, no exception), (bad response, exception)
+ for response in [(expectedResponse, False), ("BADHASH", True)]:
+ cmd = "push /mnt/sdcard/foobar %s\r\n%s" % (len(pushfile), pushfile)
+ a = MockAgent(self, commands=[("isdir /mnt/sdcard", "TRUE"),
+ (cmd, response[0])])
+ exceptionThrown = False
+ with tempfile.NamedTemporaryFile() as f:
+ try:
+ f.write(pushfile)
+ f.flush()
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port)
+ d.pushFile(f.name, '/mnt/sdcard/foobar')
+ except mozdevice.DMError:
+ exceptionThrown = True
+ self.assertEqual(exceptionThrown, response[1])
+ a.wait()
+
+ def test_push_dir(self):
+ pushfile = "1234ABCD"
+ mdsum = hashlib.md5()
+ mdsum.update(pushfile)
+ expectedFileResponse = mdsum.hexdigest()
+
+ tempdir = tempfile.mkdtemp()
+ self.addCleanup(mozfile.remove, tempdir)
+ complex_path = os.path.join(tempdir, "baz")
+ os.mkdir(complex_path)
+ f = tempfile.NamedTemporaryFile(dir=complex_path)
+ f.write(pushfile)
+ f.flush()
+
+ subTests = [{'cmds': [("isdir /mnt/sdcard/baz", "TRUE"),
+ ("push /mnt/sdcard/baz/%s %s\r\n%s" %
+ (os.path.basename(f.name), len(pushfile),
+ pushfile),
+ expectedFileResponse)],
+ 'expectException': False},
+ {'cmds': [("isdir /mnt/sdcard/baz", "TRUE"),
+ ("push /mnt/sdcard/baz/%s %s\r\n%s" %
+ (os.path.basename(f.name), len(pushfile),
+ pushfile),
+ "BADHASH")],
+ 'expectException': True},
+ {'cmds': [("isdir /mnt/sdcard/baz", "FALSE"),
+ ('info os', 'android'),
+ ("isdir /mnt", "FALSE"),
+ ("mkdr /mnt",
+ "##AGENT-WARNING## Could not create the directory /mnt")],
+ 'expectException': True},
+
+ ]
+
+ for subTest in subTests:
+ a = MockAgent(self, commands=subTest['cmds'])
+
+ exceptionThrown = False
+ try:
+ d = mozdevice.DroidSUT("127.0.0.1", port=a.port,
+ logLevel=logging.DEBUG)
+ d.pushDir(tempdir, "/mnt/sdcard")
+ except mozdevice.DMError:
+ exceptionThrown = True
+ self.assertEqual(exceptionThrown, subTest['expectException'])
+
+ a.wait()
+
+ # FIXME: delete directory when done
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_remove.py b/testing/mozbase/mozdevice/tests/sut_remove.py
new file mode 100644
index 000000000..636190186
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_remove.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestRemove(unittest.TestCase):
+
+ def test_removeDir(self):
+ commands = [("isdir /mnt/sdcard/test", "TRUE"),
+ ("rmdr /mnt/sdcard/test", "Deleting file(s) from "
+ "/storage/emulated/legacy/Moztest\n"
+ " <empty>\n"
+ "Deleting directory "
+ "/storage/emulated/legacy/Moztest\n")]
+
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ # No error implies we're all good
+ self.assertEqual(None, d.removeDir("/mnt/sdcard/test"))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_time.py b/testing/mozbase/mozdevice/tests/sut_time.py
new file mode 100644
index 000000000..11dc421cb
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_time.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestGetCurrentTime(unittest.TestCase):
+
+ def test_getCurrentTime(self):
+ command = [('clok', '1349980200')]
+
+ m = MockAgent(self, commands=command)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+ self.assertEqual(d.getCurrentTime(), int(command[0][1]))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozdevice/tests/sut_unpackfile.py b/testing/mozbase/mozdevice/tests/sut_unpackfile.py
new file mode 100644
index 000000000..1a531fe17
--- /dev/null
+++ b/testing/mozbase/mozdevice/tests/sut_unpackfile.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+import mozdevice
+import logging
+import unittest
+from sut import MockAgent
+
+
+class TestUnpack(unittest.TestCase):
+
+ def test_unpackFile(self):
+
+ commands = [("unzp /data/test/sample.zip /data/test/",
+ "Checksum: 653400271\n"
+ "1 of 1 successfully extracted\n")]
+ m = MockAgent(self, commands=commands)
+ d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=logging.DEBUG)
+        # No error being thrown implies all is well
+ self.assertEqual(None, d.unpackFile("/data/test/sample.zip",
+ "/data/test/"))
+
+if __name__ == '__main__':
+ unittest.main()
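
The SUT unit tests above all follow the same MockAgent pattern: enumerate the exact (command, response) exchange the device manager is expected to produce, point a DroidSUT at the mock agent's port, exercise one API call, and finally let the mock verify the conversation with wait(). A minimal sketch of that pattern, reusing the 'clok' exchange from sut_time.py (the test and class names here are illustrative only)::

    import logging
    import unittest

    import mozdevice
    from sut import MockAgent


    class ExampleSutTest(unittest.TestCase):

        def test_example(self):
            # 1. enumerate the exact (command, response) exchange we expect
            commands = [('clok', '1349980200')]
            # 2. start a mock agent that serves those responses in order
            a = MockAgent(self, commands=commands)
            # 3. point a DroidSUT at the mock's port and call the API under test
            d = mozdevice.DroidSUT('127.0.0.1', port=a.port,
                                   logLevel=logging.DEBUG)
            self.assertEqual(d.getCurrentTime(), 1349980200)
            # 4. make sure the mock saw every command it was primed with
            a.wait()

    if __name__ == '__main__':
        unittest.main()
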
diff --git a/testing/mozbase/mozfile/mozfile/__init__.py b/testing/mozbase/mozfile/mozfile/__init__.py
new file mode 100644
index 000000000..a527f0ad6
--- /dev/null
+++ b/testing/mozbase/mozfile/mozfile/__init__.py
@@ -0,0 +1,8 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from .mozfile import *
diff --git a/testing/mozbase/mozfile/mozfile/mozfile.py b/testing/mozbase/mozfile/mozfile/mozfile.py
new file mode 100644
index 000000000..94805594e
--- /dev/null
+++ b/testing/mozbase/mozfile/mozfile/mozfile.py
@@ -0,0 +1,449 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# We don't import all modules at the top for performance reasons. See Bug 1008943
+
+from __future__ import absolute_import
+
+from contextlib import contextmanager
+import errno
+import os
+import stat
+import time
+import warnings
+
+__all__ = ['extract_tarball',
+ 'extract_zip',
+ 'extract',
+ 'is_url',
+ 'load',
+ 'move',
+ 'remove',
+ 'rmtree',
+ 'tree',
+ 'NamedTemporaryFile',
+ 'TemporaryDirectory']
+
+# utilities for extracting archives
+
+
+def extract_tarball(src, dest):
+ """extract a .tar file"""
+
+ import tarfile
+
+ bundle = tarfile.open(src)
+ namelist = bundle.getnames()
+
+ for name in namelist:
+ bundle.extract(name, path=dest)
+ bundle.close()
+ return namelist
+
+
+def extract_zip(src, dest):
+ """extract a zip file"""
+
+ import zipfile
+
+ if isinstance(src, zipfile.ZipFile):
+ bundle = src
+ else:
+ try:
+ bundle = zipfile.ZipFile(src)
+ except Exception:
+ print "src: %s" % src
+ raise
+
+ namelist = bundle.namelist()
+
+ for name in namelist:
+ filename = os.path.realpath(os.path.join(dest, name))
+ if name.endswith('/'):
+ if not os.path.isdir(filename):
+ os.makedirs(filename)
+ else:
+ path = os.path.dirname(filename)
+ if not os.path.isdir(path):
+ os.makedirs(path)
+ _dest = open(filename, 'wb')
+ _dest.write(bundle.read(name))
+ _dest.close()
+ mode = bundle.getinfo(name).external_attr >> 16 & 0x1FF
+ # Only update permissions if attributes are set. Otherwise fallback to the defaults.
+ if mode:
+ os.chmod(filename, mode)
+ bundle.close()
+ return namelist
+
+
+def extract(src, dest=None):
+ """
+ Takes in a tar or zip file and extracts it to dest
+
+ If dest is not specified, extracts to os.path.dirname(src)
+
+ Returns the list of top level files that were extracted
+ """
+
+ import zipfile
+ import tarfile
+
+ assert os.path.exists(src), "'%s' does not exist" % src
+
+ if dest is None:
+ dest = os.path.dirname(src)
+ elif not os.path.isdir(dest):
+ os.makedirs(dest)
+ assert not os.path.isfile(dest), "dest cannot be a file"
+
+ if zipfile.is_zipfile(src):
+ namelist = extract_zip(src, dest)
+ elif tarfile.is_tarfile(src):
+ namelist = extract_tarball(src, dest)
+ else:
+ raise Exception("mozfile.extract: no archive format found for '%s'" %
+ src)
+
+ # namelist returns paths with forward slashes even in windows
+ top_level_files = [os.path.join(dest, name.rstrip('/')) for name in namelist
+ if len(name.rstrip('/').split('/')) == 1]
+
+ # namelist doesn't include folders, append these to the list
+ for name in namelist:
+ index = name.find('/')
+ if index != -1:
+ root = os.path.join(dest, name[:index])
+ if root not in top_level_files:
+ top_level_files.append(root)
+
+ return top_level_files
+
+
+# utilities for removal of files and directories
+
+def rmtree(dir):
+ """Deprecated wrapper method to remove a directory tree.
+
+ Ensure to update your code to use mozfile.remove() directly
+
+ :param dir: directory to be removed
+ """
+
+ warnings.warn("mozfile.rmtree() is deprecated in favor of mozfile.remove()",
+ PendingDeprecationWarning, stacklevel=2)
+ return remove(dir)
+
+
+def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5):
+ """
+ It's possible to see spurious errors on Windows due to various things
+ keeping a handle to the directory open (explorer, virus scanners, etc)
+ So we try a few times if it fails with a known error.
+ retry_delay is multiplied by the number of failed attempts to increase
+ the likelihood of success in subsequent attempts.
+ """
+ retry_count = 0
+ while True:
+ try:
+ func(*args)
+ except OSError as e:
+ # Error codes are defined in:
+ # http://docs.python.org/2/library/errno.html#module-errno
+ if e.errno not in (errno.EACCES, errno.ENOTEMPTY):
+ raise
+
+ if retry_count == retry_max:
+ raise
+
+ retry_count += 1
+
+ print '%s() failed for "%s". Reason: %s (%s). Retrying...' % \
+ (func.__name__, args, e.strerror, e.errno)
+ time.sleep(retry_count * retry_delay)
+ else:
+ # If no exception has been thrown it should be done
+ break
+
+
+def remove(path):
+ """Removes the specified file, link, or directory tree.
+
+ This is a replacement for shutil.rmtree that works better under
+ windows. It does the following things:
+
+ - check path access for the current user before trying to remove
+ - retry operations on some known errors due to various things keeping
+ a handle on file paths - like explorer, virus scanners, etc. The
+ known errors are errno.EACCES and errno.ENOTEMPTY, and it will
+      retry up to five times with a delay of (failed_attempts * 0.5) seconds
+ between each attempt.
+
+    Note that no error will be raised if the given path does not exist.
+
+ :param path: path to be removed
+ """
+
+ import shutil
+
+ def _call_with_windows_retry(*args, **kwargs):
+ try:
+ _call_windows_retry(*args, **kwargs)
+ except OSError as e:
+ # The file or directory to be removed doesn't exist anymore
+ if e.errno != errno.ENOENT:
+ raise
+
+ def _update_permissions(path):
+ """Sets specified pemissions depending on filetype"""
+ if os.path.islink(path):
+ # Path is a symlink which we don't have to modify
+ # because it should already have all the needed permissions
+ return
+
+ stats = os.stat(path)
+
+ if os.path.isfile(path):
+ mode = stats.st_mode | stat.S_IWUSR
+ elif os.path.isdir(path):
+ mode = stats.st_mode | stat.S_IWUSR | stat.S_IXUSR
+ else:
+ # Not supported type
+ return
+
+ _call_with_windows_retry(os.chmod, (path, mode))
+
+ if not os.path.exists(path):
+ return
+
+ if os.path.isfile(path) or os.path.islink(path):
+ # Verify the file or link is read/write for the current user
+ _update_permissions(path)
+ _call_with_windows_retry(os.remove, (path,))
+
+ elif os.path.isdir(path):
+ # Verify the directory is read/write/execute for the current user
+ _update_permissions(path)
+
+ # We're ensuring that every nested item has writable permission.
+ for root, dirs, files in os.walk(path):
+ for entry in dirs + files:
+ _update_permissions(os.path.join(root, entry))
+ _call_with_windows_retry(shutil.rmtree, (path,))
+
+
+def move(src, dst):
+ """
+ Move a file or directory path.
+
+ This is a replacement for shutil.move that works better under windows,
+ retrying operations on some known errors due to various things keeping
+ a handle on file paths.
+ """
+ import shutil
+ _call_windows_retry(shutil.move, (src, dst))
+
+
+def depth(directory):
+ """returns the integer depth of a directory or path relative to '/' """
+
+ directory = os.path.abspath(directory)
+ level = 0
+ while True:
+ directory, remainder = os.path.split(directory)
+ level += 1
+ if not remainder:
+ break
+ return level
+
+
+# ASCII delimiters
+ascii_delimeters = {
+ 'vertical_line': '|',
+ 'item_marker': '+',
+ 'last_child': '\\'
+}
+
+# unicode delimiters
+unicode_delimeters = {
+ 'vertical_line': '│',
+ 'item_marker': '├',
+    'last_child': '└'
+}
+
+
+def tree(directory,
+ item_marker=unicode_delimeters['item_marker'],
+ vertical_line=unicode_delimeters['vertical_line'],
+ last_child=unicode_delimeters['last_child'],
+ sort_key=lambda x: x.lower()):
+ """
+ display tree directory structure for `directory`
+ """
+
+ retval = []
+ indent = []
+ last = {}
+ top = depth(directory)
+
+ for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
+
+ abspath = os.path.abspath(dirpath)
+ basename = os.path.basename(abspath)
+ parent = os.path.dirname(abspath)
+ level = depth(abspath) - top
+
+ # sort articles of interest
+ for resource in (dirnames, filenames):
+ resource[:] = sorted(resource, key=sort_key)
+
+ if level > len(indent):
+ indent.append(vertical_line)
+ indent = indent[:level]
+
+ if dirnames:
+ files_end = item_marker
+ last[abspath] = dirnames[-1]
+ else:
+ files_end = last_child
+
+ if last.get(parent) == os.path.basename(abspath):
+ # last directory of parent
+ dirpath_mark = last_child
+ indent[-1] = ' '
+ elif not indent:
+ dirpath_mark = ''
+ else:
+ dirpath_mark = item_marker
+
+ # append the directory and piece of tree structure
+ # if the top-level entry directory, print as passed
+ retval.append('%s%s%s' % (''.join(indent[:-1]),
+ dirpath_mark,
+ basename if retval else directory))
+ # add the files
+ if filenames:
+ last_file = filenames[-1]
+ retval.extend([('%s%s%s' % (''.join(indent),
+ files_end if filename == last_file else item_marker,
+ filename))
+ for index, filename in enumerate(filenames)])
+
+ return '\n'.join(retval)
+
+
+# utilities for temporary resources
+
+class NamedTemporaryFile(object):
+ """
+ Like tempfile.NamedTemporaryFile except it works on Windows
+ in the case where you open the created file a second time.
+
+ This behaves very similarly to tempfile.NamedTemporaryFile but may
+ not behave exactly the same. For example, this function does not
+ prevent fd inheritance by children.
+
+ Example usage:
+
+ with NamedTemporaryFile() as fh:
+ fh.write(b'foobar')
+
+ print('Filename: %s' % fh.name)
+
+ see https://bugzilla.mozilla.org/show_bug.cgi?id=821362
+ """
+
+ def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='tmp',
+ dir=None, delete=True):
+
+ import tempfile
+ fd, path = tempfile.mkstemp(suffix, prefix, dir, 't' in mode)
+ os.close(fd)
+
+ self.file = open(path, mode)
+ self._path = path
+ self._delete = delete
+ self._unlinked = False
+
+ def __getattr__(self, k):
+ return getattr(self.__dict__['file'], k)
+
+ def __iter__(self):
+ return self.__dict__['file']
+
+ def __enter__(self):
+ self.file.__enter__()
+ return self
+
+ def __exit__(self, exc, value, tb):
+ self.file.__exit__(exc, value, tb)
+ if self.__dict__['_delete']:
+ os.unlink(self.__dict__['_path'])
+ self._unlinked = True
+
+ def __del__(self):
+ if self.__dict__['_unlinked']:
+ return
+ self.file.__exit__(None, None, None)
+ if self.__dict__['_delete']:
+ os.unlink(self.__dict__['_path'])
+
+
+@contextmanager
+def TemporaryDirectory():
+ """
+ create a temporary directory using tempfile.mkdtemp, and then clean it up.
+
+ Example usage:
+ with TemporaryDirectory() as tmp:
+ open(os.path.join(tmp, "a_temp_file"), "w").write("data")
+
+ """
+
+ import tempfile
+ import shutil
+
+ tempdir = tempfile.mkdtemp()
+ try:
+ yield tempdir
+ finally:
+ shutil.rmtree(tempdir)
+
+
+# utilities dealing with URLs
+
+def is_url(thing):
+ """
+ Return True if thing looks like a URL.
+ """
+
+ import urlparse
+
+ parsed = urlparse.urlparse(thing)
+ if 'scheme' in parsed:
+ return len(parsed.scheme) >= 2
+ else:
+ return len(parsed[0]) >= 2
+
+
+def load(resource):
+ """
+ open a file or URL for reading. If the passed resource string is not a URL,
+ or begins with 'file://', return a ``file``. Otherwise, return the
+ result of urllib2.urlopen()
+ """
+
+ import urllib2
+
+ # handle file URLs separately due to python stdlib limitations
+ if resource.startswith('file://'):
+ resource = resource[len('file://'):]
+
+ if not is_url(resource):
+ # if no scheme is given, it is a file path
+ return file(resource)
+
+ return urllib2.urlopen(resource)
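
mozfile.py above defines the module's public helpers (extract, remove, move, tree, NamedTemporaryFile, TemporaryDirectory, load, is_url). A minimal usage sketch tying the main pieces together; the archive name and file contents below are illustrative only::

    import os
    import zipfile

    import mozfile

    with mozfile.TemporaryDirectory() as tmp:
        # build a tiny zip so the example is self-contained
        archive = os.path.join(tmp, 'example.zip')
        zf = zipfile.ZipFile(archive, 'w')
        zf.writestr('docs/hello.txt', 'hello')
        zf.close()

        # extract() dispatches to extract_zip()/extract_tarball() and
        # returns the top-level paths it created
        dest = os.path.join(tmp, 'dest')
        print mozfile.extract(archive, dest)      # [<dest>/docs]

        # NamedTemporaryFile can be reopened by name, even on Windows
        with mozfile.NamedTemporaryFile(dir=tmp, delete=False) as fh:
            fh.write('scratch data')
        print open(fh.name).read()                # 'scratch data'

        # remove() retries on the transient Windows errors described above
        mozfile.remove(dest)
        print os.path.exists(dest)                # False
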
diff --git a/testing/mozbase/mozfile/setup.py b/testing/mozbase/mozfile/setup.py
new file mode 100644
index 000000000..277ff7b52
--- /dev/null
+++ b/testing/mozbase/mozfile/setup.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_NAME = 'mozfile'
+PACKAGE_VERSION = '1.2'
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Library of file utilities for use in Mozilla testing",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozfile'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=[],
+ tests_require=['mozhttpd']
+ )
diff --git a/testing/mozbase/mozfile/tests/files/missing_file_attributes.zip b/testing/mozbase/mozfile/tests/files/missing_file_attributes.zip
new file mode 100644
index 000000000..2b5409e89
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/files/missing_file_attributes.zip
Binary files differ
diff --git a/testing/mozbase/mozfile/tests/manifest.ini b/testing/mozbase/mozfile/tests/manifest.ini
new file mode 100644
index 000000000..c7889beca
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/manifest.ini
@@ -0,0 +1,6 @@
+[test_extract.py]
+[test_load.py]
+[test_move_remove.py]
+[test_tempdir.py]
+[test_tempfile.py]
+[test_url.py]
diff --git a/testing/mozbase/mozfile/tests/stubs.py b/testing/mozbase/mozfile/tests/stubs.py
new file mode 100644
index 000000000..06d79e7af
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/stubs.py
@@ -0,0 +1,37 @@
+import os
+import shutil
+import tempfile
+
+
+# stub file paths
+files = [('foo.txt',),
+ ('foo', 'bar.txt',),
+ ('foo', 'bar', 'fleem.txt',),
+ ('foobar', 'fleem.txt',),
+ ('bar.txt',),
+ ('nested_tree', 'bar', 'fleem.txt',),
+ ('readonly.txt',),
+ ]
+
+
+def create_stub():
+ """create a stub directory"""
+
+ tempdir = tempfile.mkdtemp()
+ try:
+ for path in files:
+ fullpath = os.path.join(tempdir, *path)
+ dirname = os.path.dirname(fullpath)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ contents = path[-1]
+ f = file(fullpath, 'w')
+ f.write(contents)
+ f.close()
+ return tempdir
+ except Exception:
+ try:
+ shutil.rmtree(tempdir)
+ except:
+ pass
+ raise
diff --git a/testing/mozbase/mozfile/tests/test_extract.py b/testing/mozbase/mozfile/tests/test_extract.py
new file mode 100644
index 000000000..e91f52349
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/test_extract.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+
+import os
+import shutil
+import tarfile
+import tempfile
+import unittest
+import zipfile
+
+import mozfile
+
+import stubs
+
+
+class TestExtract(unittest.TestCase):
+ """test extracting archives"""
+
+ def ensure_directory_contents(self, directory):
+ """ensure the directory contents match"""
+ for f in stubs.files:
+ path = os.path.join(directory, *f)
+ exists = os.path.exists(path)
+ if not exists:
+ print "%s does not exist" % (os.path.join(f))
+ self.assertTrue(exists)
+ if exists:
+ contents = file(path).read().strip()
+ self.assertTrue(contents == f[-1])
+
+ def test_extract_zipfile(self):
+ """test extracting a zipfile"""
+ _zipfile = self.create_zip()
+ self.assertTrue(os.path.exists(_zipfile))
+ try:
+ dest = tempfile.mkdtemp()
+ try:
+ mozfile.extract_zip(_zipfile, dest)
+ self.ensure_directory_contents(dest)
+ finally:
+ shutil.rmtree(dest)
+ finally:
+ os.remove(_zipfile)
+
+ def test_extract_zipfile_missing_file_attributes(self):
+ """if files do not have attributes set the default permissions have to be inherited."""
+ _zipfile = os.path.join(os.path.dirname(__file__), 'files', 'missing_file_attributes.zip')
+ self.assertTrue(os.path.exists(_zipfile))
+ dest = tempfile.mkdtemp()
+ try:
+ # Get the default file permissions for the user
+ fname = os.path.join(dest, 'foo')
+ with open(fname, 'w'):
+ pass
+ default_stmode = os.stat(fname).st_mode
+
+ files = mozfile.extract_zip(_zipfile, dest)
+ for filename in files:
+ self.assertEqual(os.stat(os.path.join(dest, filename)).st_mode,
+ default_stmode)
+ finally:
+ shutil.rmtree(dest)
+
+ def test_extract_tarball(self):
+ """test extracting a tarball"""
+ tarball = self.create_tarball()
+ self.assertTrue(os.path.exists(tarball))
+ try:
+ dest = tempfile.mkdtemp()
+ try:
+ mozfile.extract_tarball(tarball, dest)
+ self.ensure_directory_contents(dest)
+ finally:
+ shutil.rmtree(dest)
+ finally:
+ os.remove(tarball)
+
+ def test_extract(self):
+ """test the generalized extract function"""
+
+ # test extracting a tarball
+ tarball = self.create_tarball()
+ self.assertTrue(os.path.exists(tarball))
+ try:
+ dest = tempfile.mkdtemp()
+ try:
+ mozfile.extract(tarball, dest)
+ self.ensure_directory_contents(dest)
+ finally:
+ shutil.rmtree(dest)
+ finally:
+ os.remove(tarball)
+
+ # test extracting a zipfile
+ _zipfile = self.create_zip()
+ self.assertTrue(os.path.exists(_zipfile))
+ try:
+ dest = tempfile.mkdtemp()
+ try:
+ mozfile.extract_zip(_zipfile, dest)
+ self.ensure_directory_contents(dest)
+ finally:
+ shutil.rmtree(dest)
+ finally:
+ os.remove(_zipfile)
+
+ # test extracting some non-archive; this should fail
+ fd, filename = tempfile.mkstemp()
+ os.write(fd, 'This is not a zipfile or tarball')
+ os.close(fd)
+ exception = None
+ try:
+ dest = tempfile.mkdtemp()
+ mozfile.extract(filename, dest)
+ except Exception as exception:
+ pass
+ finally:
+ os.remove(filename)
+ os.rmdir(dest)
+ self.assertTrue(isinstance(exception, Exception))
+
+ # utility functions
+
+ def create_tarball(self):
+ """create a stub tarball for testing"""
+ tempdir = stubs.create_stub()
+ filename = tempfile.mktemp(suffix='.tar')
+ archive = tarfile.TarFile(filename, mode='w')
+ try:
+ for path in stubs.files:
+ archive.add(os.path.join(tempdir, *path), arcname=os.path.join(*path))
+ except:
+            os.remove(filename)
+ raise
+ finally:
+ shutil.rmtree(tempdir)
+ archive.close()
+ return filename
+
+ def create_zip(self):
+ """create a stub zipfile for testing"""
+
+ tempdir = stubs.create_stub()
+ filename = tempfile.mktemp(suffix='.zip')
+ archive = zipfile.ZipFile(filename, mode='w')
+ try:
+ for path in stubs.files:
+ archive.write(os.path.join(tempdir, *path), arcname=os.path.join(*path))
+ except:
+ os.remove(filename)
+ raise
+ finally:
+ shutil.rmtree(tempdir)
+ archive.close()
+ return filename
diff --git a/testing/mozbase/mozfile/tests/test_load.py b/testing/mozbase/mozfile/tests/test_load.py
new file mode 100755
index 000000000..13a5b519c
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/test_load.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+"""
+tests for mozfile.load
+"""
+
+import mozhttpd
+import os
+import tempfile
+import unittest
+from mozfile import load
+
+
+class TestLoad(unittest.TestCase):
+ """test the load function"""
+
+ def test_http(self):
+ """test with mozhttpd and a http:// URL"""
+
+ def example(request):
+ """example request handler"""
+ body = 'example'
+ return (200, {'Content-type': 'text/plain',
+ 'Content-length': len(body)
+ }, body)
+
+ host = '127.0.0.1'
+ httpd = mozhttpd.MozHttpd(host=host,
+ urlhandlers=[{'method': 'GET',
+ 'path': '.*',
+ 'function': example}])
+ try:
+ httpd.start(block=False)
+ content = load(httpd.get_url()).read()
+ self.assertEqual(content, 'example')
+ finally:
+ httpd.stop()
+
+ def test_file_path(self):
+ """test loading from file path"""
+ try:
+ # create a temporary file
+ tmp = tempfile.NamedTemporaryFile(delete=False)
+ tmp.write('foo bar')
+ tmp.close()
+
+ # read the file
+ contents = file(tmp.name).read()
+ self.assertEqual(contents, 'foo bar')
+
+ # read the file with load and a file path
+ self.assertEqual(load(tmp.name).read(), contents)
+
+ # read the file with load and a file URL
+ self.assertEqual(load('file://%s' % tmp.name).read(), contents)
+ finally:
+ # remove the tempfile
+ if os.path.exists(tmp.name):
+ os.remove(tmp.name)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozfile/tests/test_move_remove.py b/testing/mozbase/mozfile/tests/test_move_remove.py
new file mode 100644
index 000000000..e9d0cd434
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/test_move_remove.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python
+
+import os
+import stat
+import shutil
+import threading
+import time
+import unittest
+import errno
+from contextlib import contextmanager
+
+import mozfile
+import mozinfo
+
+import stubs
+
+
+def mark_readonly(path):
+ """Removes all write permissions from given file/directory.
+
+ :param path: path of directory/file of which modes must be changed
+ """
+ mode = os.stat(path)[stat.ST_MODE]
+ os.chmod(path, mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH)
+
+
+class FileOpenCloseThread(threading.Thread):
+ """Helper thread for asynchronous file handling"""
+
+ def __init__(self, path, delay, delete=False):
+ threading.Thread.__init__(self)
+ self.file_opened = threading.Event()
+ self.delay = delay
+ self.path = path
+ self.delete = delete
+
+ def run(self):
+ with open(self.path):
+ self.file_opened.set()
+ time.sleep(self.delay)
+ if self.delete:
+ try:
+ os.remove(self.path)
+ except:
+ pass
+
+
+@contextmanager
+def wait_file_opened_in_thread(*args, **kwargs):
+ thread = FileOpenCloseThread(*args, **kwargs)
+ thread.start()
+ thread.file_opened.wait()
+ try:
+ yield thread
+ finally:
+ thread.join()
+
+
+class MozfileRemoveTestCase(unittest.TestCase):
+ """Test our ability to remove directories and files"""
+
+ def setUp(self):
+ # Generate a stub
+ self.tempdir = stubs.create_stub()
+
+ def tearDown(self):
+ if os.path.isdir(self.tempdir):
+ shutil.rmtree(self.tempdir)
+
+ def test_remove_directory(self):
+ """Test the removal of a directory"""
+ self.assertTrue(os.path.isdir(self.tempdir))
+ mozfile.remove(self.tempdir)
+ self.assertFalse(os.path.exists(self.tempdir))
+
+ def test_remove_directory_with_open_file(self):
+ """Test removing a directory with an open file"""
+ # Open a file in the generated stub
+ filepath = os.path.join(self.tempdir, *stubs.files[1])
+ f = file(filepath, 'w')
+ f.write('foo-bar')
+
+ # keep file open and then try removing the dir-tree
+ if mozinfo.isWin:
+ # On the Windows family WindowsError should be raised.
+ self.assertRaises(OSError, mozfile.remove, self.tempdir)
+ self.assertTrue(os.path.exists(self.tempdir))
+ else:
+ # Folder should be deleted on all other platforms
+ mozfile.remove(self.tempdir)
+ self.assertFalse(os.path.exists(self.tempdir))
+
+ def test_remove_closed_file(self):
+ """Test removing a closed file"""
+ # Open a file in the generated stub
+ filepath = os.path.join(self.tempdir, *stubs.files[1])
+ with open(filepath, 'w') as f:
+ f.write('foo-bar')
+
+ # Folder should be deleted on all platforms
+ mozfile.remove(self.tempdir)
+ self.assertFalse(os.path.exists(self.tempdir))
+
+ def test_removing_open_file_with_retry(self):
+ """Test removing a file in use with retry"""
+ filepath = os.path.join(self.tempdir, *stubs.files[1])
+
+ with wait_file_opened_in_thread(filepath, 0.2):
+ # on windows first attempt will fail,
+ # and it will be retried until the thread leave the handle
+ mozfile.remove(filepath)
+
+ # Check deletion was successful
+ self.assertFalse(os.path.exists(filepath))
+
+ def test_removing_already_deleted_file_with_retry(self):
+ """Test removing a meanwhile removed file with retry"""
+ filepath = os.path.join(self.tempdir, *stubs.files[1])
+
+ with wait_file_opened_in_thread(filepath, 0.2, True):
+ # on windows first attempt will fail, and before
+ # the retry the opened file will be deleted in the thread
+ mozfile.remove(filepath)
+
+ # Check deletion was successful
+ self.assertFalse(os.path.exists(filepath))
+
+ def test_remove_readonly_tree(self):
+ """Test removing a read-only directory"""
+
+ dirpath = os.path.join(self.tempdir, "nested_tree")
+ mark_readonly(dirpath)
+
+ # However, mozfile should change write permissions and remove dir.
+ mozfile.remove(dirpath)
+
+ self.assertFalse(os.path.exists(dirpath))
+
+ def test_remove_readonly_file(self):
+ """Test removing read-only files"""
+ filepath = os.path.join(self.tempdir, *stubs.files[1])
+ mark_readonly(filepath)
+
+ # However, mozfile should change write permission and then remove file.
+ mozfile.remove(filepath)
+
+ self.assertFalse(os.path.exists(filepath))
+
+ @unittest.skipIf(mozinfo.isWin, "Symlinks are not supported on Windows")
+ def test_remove_symlink(self):
+ """Test removing a symlink"""
+ file_path = os.path.join(self.tempdir, *stubs.files[1])
+ symlink_path = os.path.join(self.tempdir, 'symlink')
+
+ os.symlink(file_path, symlink_path)
+ self.assertTrue(os.path.islink(symlink_path))
+
+ # The linked folder and files should not be deleted
+ mozfile.remove(symlink_path)
+ self.assertFalse(os.path.exists(symlink_path))
+ self.assertTrue(os.path.exists(file_path))
+
+ @unittest.skipIf(mozinfo.isWin, "Symlinks are not supported on Windows")
+ def test_remove_symlink_in_subfolder(self):
+ """Test removing a folder with an contained symlink"""
+ file_path = os.path.join(self.tempdir, *stubs.files[0])
+ dir_path = os.path.dirname(os.path.join(self.tempdir, *stubs.files[1]))
+ symlink_path = os.path.join(dir_path, 'symlink')
+
+ os.symlink(file_path, symlink_path)
+ self.assertTrue(os.path.islink(symlink_path))
+
+ # The folder with the contained symlink will be deleted but not the
+ # original linked file
+ mozfile.remove(dir_path)
+ self.assertFalse(os.path.exists(dir_path))
+ self.assertFalse(os.path.exists(symlink_path))
+ self.assertTrue(os.path.exists(file_path))
+
+ @unittest.skipIf(mozinfo.isWin or not os.geteuid(),
+ "Symlinks are not supported on Windows and cannot run test as root")
+ def test_remove_symlink_for_system_path(self):
+ """Test removing a symlink which points to a system folder"""
+ symlink_path = os.path.join(self.tempdir, 'symlink')
+
+ os.symlink(os.path.dirname(self.tempdir), symlink_path)
+ self.assertTrue(os.path.islink(symlink_path))
+
+ # The folder with the contained symlink will be deleted but not the
+ # original linked file
+ mozfile.remove(symlink_path)
+ self.assertFalse(os.path.exists(symlink_path))
+
+ def test_remove_path_that_does_not_exists(self):
+ not_existing_path = os.path.join(self.tempdir, 'I_do_not_not_exists')
+ try:
+ mozfile.remove(not_existing_path)
+ except OSError as exc:
+ if exc.errno == errno.ENOENT:
+ self.fail("removing non existing path must not raise error")
+ raise
+
+
+class MozFileMoveTestCase(unittest.TestCase):
+
+ def setUp(self):
+ # Generate a stub
+ self.tempdir = stubs.create_stub()
+ self.addCleanup(mozfile.rmtree, self.tempdir)
+
+ def test_move_file(self):
+ file_path = os.path.join(self.tempdir, *stubs.files[1])
+ moved_path = file_path + '.moved'
+ self.assertTrue(os.path.isfile(file_path))
+ self.assertFalse(os.path.exists(moved_path))
+ mozfile.move(file_path, moved_path)
+ self.assertFalse(os.path.exists(file_path))
+ self.assertTrue(os.path.isfile(moved_path))
+
+ def test_move_file_with_retry(self):
+ file_path = os.path.join(self.tempdir, *stubs.files[1])
+ moved_path = file_path + '.moved'
+
+ with wait_file_opened_in_thread(file_path, 0.2):
+ # first move attempt should fail on windows and be retried
+ mozfile.move(file_path, moved_path)
+ self.assertFalse(os.path.exists(file_path))
+ self.assertTrue(os.path.isfile(moved_path))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozfile/tests/test_tempdir.py b/testing/mozbase/mozfile/tests/test_tempdir.py
new file mode 100644
index 000000000..81f03d095
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/test_tempdir.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+tests for mozfile.TemporaryDirectory
+"""
+
+from mozfile import TemporaryDirectory
+import os
+import unittest
+
+
+class TestTemporaryDirectory(unittest.TestCase):
+
+ def test_removed(self):
+ """ensure that a TemporaryDirectory gets removed"""
+ path = None
+ with TemporaryDirectory() as tmp:
+ path = tmp
+ self.assertTrue(os.path.isdir(tmp))
+ tmpfile = os.path.join(tmp, "a_temp_file")
+ open(tmpfile, "w").write("data")
+ self.assertTrue(os.path.isfile(tmpfile))
+ self.assertFalse(os.path.isdir(path))
+ self.assertFalse(os.path.exists(path))
+
+ def test_exception(self):
+ """ensure that TemporaryDirectory handles exceptions"""
+ path = None
+ with self.assertRaises(Exception):
+ with TemporaryDirectory() as tmp:
+ path = tmp
+ self.assertTrue(os.path.isdir(tmp))
+ raise Exception("oops")
+ self.assertFalse(os.path.isdir(path))
+ self.assertFalse(os.path.exists(path))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozfile/tests/test_tempfile.py b/testing/mozbase/mozfile/tests/test_tempfile.py
new file mode 100644
index 000000000..3c3d26d5d
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/test_tempfile.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+tests for mozfile.NamedTemporaryFile
+"""
+
+import mozfile
+import os
+import unittest
+
+
+class TestNamedTemporaryFile(unittest.TestCase):
+ """test our fix for NamedTemporaryFile"""
+
+ def test_named_temporary_file(self):
+ """ Ensure the fix for re-opening a NamedTemporaryFile works
+
+ Refer to https://bugzilla.mozilla.org/show_bug.cgi?id=818777
+ and https://bugzilla.mozilla.org/show_bug.cgi?id=821362
+ """
+
+ test_string = "A simple test"
+ with mozfile.NamedTemporaryFile() as temp:
+ # Test we can write to file
+ temp.write(test_string)
+ # Forced flush, so that we can read later
+ temp.flush()
+
+ # Test we can open the file again on all platforms
+ self.assertEqual(open(temp.name).read(), test_string)
+
+ def test_iteration(self):
+ """ensure the line iterator works"""
+
+ # make a file and write to it
+ tf = mozfile.NamedTemporaryFile()
+ notes = ['doe', 'rae', 'mi']
+ for note in notes:
+ tf.write('%s\n' % note)
+ tf.flush()
+
+ # now read from it
+ tf.seek(0)
+ lines = [line.rstrip('\n') for line in tf.readlines()]
+ self.assertEqual(lines, notes)
+
+ # now read from it iteratively
+ lines = []
+ for line in tf:
+ lines.append(line.strip())
+ self.assertEqual(lines, []) # because we did not seek(0)
+ tf.seek(0)
+ lines = []
+ for line in tf:
+ lines.append(line.strip())
+ self.assertEqual(lines, notes)
+
+ def test_delete(self):
+ """ensure ``delete=True/False`` works as expected"""
+
+ # make a deleteable file; ensure it gets cleaned up
+ path = None
+ with mozfile.NamedTemporaryFile(delete=True) as tf:
+ path = tf.name
+ self.assertTrue(isinstance(path, basestring))
+ self.assertFalse(os.path.exists(path))
+
+ # it is also deleted when __del__ is called
+ # here we will do so explicitly
+ tf = mozfile.NamedTemporaryFile(delete=True)
+ path = tf.name
+ self.assertTrue(os.path.exists(path))
+ del tf
+ self.assertFalse(os.path.exists(path))
+
+ # Now the same thing but we won't delete the file
+ path = None
+ try:
+ with mozfile.NamedTemporaryFile(delete=False) as tf:
+ path = tf.name
+ self.assertTrue(os.path.exists(path))
+ finally:
+ if path and os.path.exists(path):
+ os.remove(path)
+
+ path = None
+ try:
+ tf = mozfile.NamedTemporaryFile(delete=False)
+ path = tf.name
+ self.assertTrue(os.path.exists(path))
+ del tf
+ self.assertTrue(os.path.exists(path))
+ finally:
+ if path and os.path.exists(path):
+ os.remove(path)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozfile/tests/test_url.py b/testing/mozbase/mozfile/tests/test_url.py
new file mode 100755
index 000000000..7d2b12b39
--- /dev/null
+++ b/testing/mozbase/mozfile/tests/test_url.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+"""
+tests for is_url
+"""
+
+import unittest
+from mozfile import is_url
+
+
+class TestIsUrl(unittest.TestCase):
+ """test the is_url function"""
+
+ def test_is_url(self):
+ self.assertTrue(is_url('http://mozilla.org'))
+ self.assertFalse(is_url('/usr/bin/mozilla.org'))
+ self.assertTrue(is_url('file:///usr/bin/mozilla.org'))
+        self.assertFalse(is_url(r'c:\foo\bar'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozhttpd/mozhttpd/__init__.py b/testing/mozbase/mozhttpd/mozhttpd/__init__.py
new file mode 100644
index 000000000..c15b0d028
--- /dev/null
+++ b/testing/mozbase/mozhttpd/mozhttpd/__init__.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Mozhttpd is a simple http webserver written in python, designed expressly
+for use in automated testing scenarios. It is designed to both serve static
+content and provide simple web services.
+
+The server is based on python standard library modules such as
+SimpleHttpServer, urlparse, etc. The ThreadingMixIn is used to
+serve each request on a discrete thread.
+
+Some existing uses of mozhttpd include Peptest_, Eideticker_, and Talos_.
+
+.. _Peptest: https://github.com/mozilla/peptest/
+
+.. _Eideticker: https://github.com/mozilla/eideticker/
+
+.. _Talos: http://hg.mozilla.org/build/
+
+The following simple example creates a basic HTTP server which serves
+content from the current directory, defines a single API endpoint
+`/api/resource/<resourceid>` and then serves requests indefinitely:
+
+::
+
+ import mozhttpd
+
+ @mozhttpd.handlers.json_response
+ def resource_get(request, objid):
+ return (200, { 'id': objid,
+ 'query': request.query })
+
+
+ httpd = mozhttpd.MozHttpd(port=8080, docroot='.',
+ urlhandlers = [ { 'method': 'GET',
+ 'path': '/api/resources/([^/]+)/?',
+ 'function': resource_get } ])
+ print "Serving '%s' at %s:%s" % (httpd.docroot, httpd.host, httpd.port)
+ httpd.start(block=True)
+
+"""
+
+from mozhttpd import MozHttpd, Request, RequestHandler, main
+from handlers import json_response
+
+__all__ = ['MozHttpd', 'Request', 'RequestHandler', 'main', 'json_response']
diff --git a/testing/mozbase/mozhttpd/mozhttpd/handlers.py b/testing/mozbase/mozhttpd/mozhttpd/handlers.py
new file mode 100644
index 000000000..1b0a86a40
--- /dev/null
+++ b/testing/mozbase/mozhttpd/mozhttpd/handlers.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+
+def json_response(func):
+ """ Translates results of 'func' into a JSON response. """
+ def wrap(*a, **kw):
+ (code, data) = func(*a, **kw)
+ json_data = json.dumps(data)
+ return (code, {'Content-type': 'application/json',
+ 'Content-Length': len(json_data)}, json_data)
+
+ return wrap
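
A small illustration of what the decorator above does to a handler's return value; the handler name and payload here are made up for the example::

    from mozhttpd.handlers import json_response

    @json_response
    def handler(request):
        return (200, {'ok': True})

    # the wrapped handler now returns a full (code, headers, body) triple:
    # (200, {'Content-type': 'application/json', 'Content-Length': 12},
    #  '{"ok": true}')
    print handler(None)
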
diff --git a/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
new file mode 100755
index 000000000..4ca0847d2
--- /dev/null
+++ b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
@@ -0,0 +1,330 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import BaseHTTPServer
+import SimpleHTTPServer
+import errno
+import logging
+import threading
+import posixpath
+import socket
+import sys
+import os
+import urllib
+import urlparse
+import re
+import moznetwork
+import time
+from SocketServer import ThreadingMixIn
+
+
+class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer):
+ allow_reuse_address = True
+ acceptable_errors = (errno.EPIPE, errno.ECONNABORTED)
+
+ def handle_error(self, request, client_address):
+ error = sys.exc_value
+
+ if ((isinstance(error, socket.error) and
+ isinstance(error.args, tuple) and
+ error.args[0] in self.acceptable_errors)
+ or
+ (isinstance(error, IOError) and
+ error.errno in self.acceptable_errors)):
+ pass # remote hang up before the result is sent
+ else:
+ logging.error(error)
+
+
+class Request(object):
+ """Details of a request."""
+
+ # attributes from urlsplit that this class also sets
+ uri_attrs = ('scheme', 'netloc', 'path', 'query', 'fragment')
+
+ def __init__(self, uri, headers, rfile=None):
+ self.uri = uri
+ self.headers = headers
+ parsed = urlparse.urlsplit(uri)
+ for i, attr in enumerate(self.uri_attrs):
+ setattr(self, attr, parsed[i])
+ try:
+ body_len = int(self.headers.get('Content-length', 0))
+ except ValueError:
+ body_len = 0
+ if body_len and rfile:
+ self.body = rfile.read(body_len)
+ else:
+ self.body = None
+
+
+class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+
+ docroot = os.getcwd() # current working directory at time of import
+ proxy_host_dirs = False
+ request_log = []
+ log_requests = False
+ request = None
+
+ def __init__(self, *args, **kwargs):
+ SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
+ self.extensions_map['.svg'] = 'image/svg+xml'
+
+ def _try_handler(self, method):
+ if self.log_requests:
+ self.request_log.append({'method': method,
+ 'path': self.request.path,
+ 'time': time.time()})
+
+ handlers = [handler for handler in self.urlhandlers
+ if handler['method'] == method]
+ for handler in handlers:
+ m = re.match(handler['path'], self.request.path)
+ if m:
+ (response_code, headerdict, data) = \
+ handler['function'](self.request, *m.groups())
+ self.send_response(response_code)
+ for (keyword, value) in headerdict.iteritems():
+ self.send_header(keyword, value)
+ self.end_headers()
+ self.wfile.write(data)
+
+ return True
+
+ return False
+
+ def _find_path(self):
+ """Find the on-disk path to serve this request from,
+ using self.path_mappings and self.docroot.
+ Return (url_path, disk_path)."""
+ path_components = filter(None, self.request.path.split('/'))
+ for prefix, disk_path in self.path_mappings.iteritems():
+ prefix_components = filter(None, prefix.split('/'))
+ if len(path_components) < len(prefix_components):
+ continue
+ if path_components[:len(prefix_components)] == prefix_components:
+ return ('/'.join(path_components[len(prefix_components):]),
+ disk_path)
+ if self.docroot:
+ return self.request.path, self.docroot
+ return None
+
+ def parse_request(self):
+ retval = SimpleHTTPServer.SimpleHTTPRequestHandler.parse_request(self)
+ self.request = Request(self.path, self.headers, self.rfile)
+ return retval
+
+ def do_GET(self):
+ if not self._try_handler('GET'):
+ res = self._find_path()
+ if res:
+ self.path, self.disk_root = res
+ # don't include query string and fragment, and prepend
+ # host directory if required.
+ if self.request.netloc and self.proxy_host_dirs:
+ self.path = '/' + self.request.netloc + \
+ self.path
+ SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
+ else:
+ self.send_response(404)
+ self.end_headers()
+ self.wfile.write('')
+
+ def do_POST(self):
+ # if we don't have a match, we always fall through to 404 (this may
+ # not be "technically" correct if we have a local file at the same
+ # path as the resource but... meh)
+ if not self._try_handler('POST'):
+ self.send_response(404)
+ self.end_headers()
+ self.wfile.write('')
+
+ def do_DEL(self):
+ # if we don't have a match, we always fall through to 404 (this may
+ # not be "technically" correct if we have a local file at the same
+ # path as the resource but... meh)
+ if not self._try_handler('DEL'):
+ self.send_response(404)
+ self.end_headers()
+ self.wfile.write('')
+
+ def translate_path(self, path):
+ # this is taken from SimpleHTTPRequestHandler.translate_path(),
+ # except we serve from self.docroot instead of os.getcwd(), and
+ # parse_request()/do_GET() have already stripped the query string and
+ # fragment and mangled the path for proxying, if required.
+ path = posixpath.normpath(urllib.unquote(self.path))
+ words = path.split('/')
+ words = filter(None, words)
+ path = self.disk_root
+ for word in words:
+ drive, word = os.path.splitdrive(word)
+ head, word = os.path.split(word)
+ if word in (os.curdir, os.pardir):
+ continue
+ path = os.path.join(path, word)
+ return path
+
+ # I found on my local network that calls to this were timing out
+ # I believe all of these calls are from log_message
+ def address_string(self):
+ return "a.b.c.d"
+
+ # This produces a LOT of noise
+ def log_message(self, format, *args):
+ pass
+
+
+class MozHttpd(object):
+ """
+ :param host: Host from which to serve (default 127.0.0.1)
+ :param port: Port from which to serve (default 8888)
+ :param docroot: Server root (default os.getcwd())
+ :param urlhandlers: Handlers to specify behavior against method and path match (default None)
+ :param path_mappings: A dict mapping URL prefixes to additional on-disk paths.
+ :param proxy_host_dirs: Toggle proxy behavior (default False)
+ :param log_requests: Toggle logging behavior (default False)
+
+ Very basic HTTP server class. Takes a docroot (path on the filesystem)
+ and a set of urlhandler dictionaries of the form:
+
+ ::
+
+ {
+ 'method': HTTP method (string): GET, POST, or DEL,
+ 'path': PATH_INFO (regular expression string),
+        'function': function of form fn(request, arg1, arg2, ...)
+ }
+
+ and serves HTTP. For each request, MozHttpd will either return a file
+ off the docroot, or dispatch to a handler function (if both path and
+ method match).
+
+ Note that one of docroot or urlhandlers may be None (in which case no
+    local files or handlers, respectively, will be used). If both docroot and
+ urlhandlers are None then MozHttpd will default to serving just the local
+ directory.
+
+ MozHttpd also handles proxy requests (i.e. with a full URI on the request
+ line). By default files are served from docroot according to the request
+ URI's path component, but if proxy_host_dirs is True, files are served
+ from <self.docroot>/<host>/.
+
+ For example, the request "GET http://foo.bar/dir/file.html" would
+ (assuming no handlers match) serve <docroot>/dir/file.html if
+ proxy_host_dirs is False, or <docroot>/foo.bar/dir/file.html if it is
+ True.
+ """
+
+ def __init__(self,
+ host="127.0.0.1",
+ port=0,
+ docroot=None,
+ urlhandlers=None,
+ path_mappings=None,
+ proxy_host_dirs=False,
+ log_requests=False):
+ self.host = host
+ self.port = int(port)
+ self.docroot = docroot
+ if not (urlhandlers or docroot or path_mappings):
+ self.docroot = os.getcwd()
+ self.proxy_host_dirs = proxy_host_dirs
+ self.httpd = None
+ self.urlhandlers = urlhandlers or []
+ self.path_mappings = path_mappings or {}
+ self.log_requests = log_requests
+ self.request_log = []
+
+ class RequestHandlerInstance(RequestHandler):
+ docroot = self.docroot
+ urlhandlers = self.urlhandlers
+ path_mappings = self.path_mappings
+ proxy_host_dirs = self.proxy_host_dirs
+ request_log = self.request_log
+ log_requests = self.log_requests
+
+ self.handler_class = RequestHandlerInstance
+
+ def start(self, block=False):
+ """
+ Starts the server.
+
+ If `block` is True, the call will not return. If `block` is False, the
+ server will be started on a separate thread that can be terminated by
+ a call to stop().
+ """
+ self.httpd = EasyServer((self.host, self.port), self.handler_class)
+ if block:
+ self.httpd.serve_forever()
+ else:
+ self.server = threading.Thread(target=self.httpd.serve_forever)
+ self.server.setDaemon(True) # don't hang on exit
+ self.server.start()
+
+ def stop(self):
+ """
+ Stops the server.
+
+ If the server is not running, this method has no effect.
+ """
+ if self.httpd:
+ # FIXME: There is no shutdown() method in Python 2.4...
+ try:
+ self.httpd.shutdown()
+ except AttributeError:
+ pass
+ self.httpd = None
+
+ def get_url(self, path="/"):
+ """
+ Returns a URL that can be used for accessing the server (e.g. http://192.168.1.3:4321/)
+
+ :param path: Path to append to URL (e.g. if path were /foobar.html you would get a URL like
+ http://192.168.1.3:4321/foobar.html). Default is `/`.
+ """
+ if not self.httpd:
+ return None
+
+ return "http://%s:%s%s" % (self.host, self.httpd.server_port, path)
+
+ __del__ = stop
+
+
+def main(args=sys.argv[1:]):
+
+ # parse command line options
+ from optparse import OptionParser
+ parser = OptionParser()
+ parser.add_option('-p', '--port', dest='port',
+ type="int", default=8888,
+ help="port to run the server on [DEFAULT: %default]")
+ parser.add_option('-H', '--host', dest='host',
+ default='127.0.0.1',
+ help="host [DEFAULT: %default]")
+ parser.add_option('-i', '--external-ip', action="store_true",
+ dest='external_ip', default=False,
+ help="find and use external ip for host")
+ parser.add_option('-d', '--docroot', dest='docroot',
+ default=os.getcwd(),
+ help="directory to serve files from [DEFAULT: %default]")
+ options, args = parser.parse_args(args)
+ if args:
+ parser.error("mozhttpd does not take any arguments")
+
+ if options.external_ip:
+ host = moznetwork.get_lan_ip()
+ else:
+ host = options.host
+
+ # create the server
+ server = MozHttpd(host=host, port=options.port, docroot=options.docroot)
+
+ print "Serving '%s' at %s:%s" % (server.docroot, server.host, server.port)
+ server.start(block=True)
+
+if __name__ == '__main__':
+ main()
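
Beyond the command-line entry point, MozHttpd is mostly used as a library. A minimal sketch combining a docroot, a path_mappings prefix and a JSON urlhandler, as described in the class docstring above (the '/static' prefix, handler name and payload are illustrative only)::

    import json
    import os
    import urllib2

    import mozhttpd


    @mozhttpd.handlers.json_response
    def hello(request):
        return (200, {'message': 'hello', 'query': request.query})

    httpd = mozhttpd.MozHttpd(port=0,                  # 0 picks a free port
                              docroot=os.getcwd(),
                              path_mappings={'/static': os.getcwd()},
                              urlhandlers=[{'method': 'GET',
                                            'path': '/api/hello/?',
                                            'function': hello}])
    httpd.start(block=False)
    print json.loads(urllib2.urlopen(httpd.get_url('/api/hello/')).read())
    httpd.stop()
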
diff --git a/testing/mozbase/mozhttpd/setup.py b/testing/mozbase/mozhttpd/setup.py
new file mode 100644
index 000000000..b7799dddd
--- /dev/null
+++ b/testing/mozbase/mozhttpd/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = '0.7'
+deps = ['moznetwork >= 0.24']
+
+setup(name='mozhttpd',
+ version=PACKAGE_VERSION,
+ description="Python webserver intended for use with Mozilla testing",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozhttpd'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozhttpd = mozhttpd:main
+ """,
+ )
diff --git a/testing/mozbase/mozhttpd/tests/api.py b/testing/mozbase/mozhttpd/tests/api.py
new file mode 100644
index 000000000..b785ac5ef
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/api.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import mozhttpd
+import urllib2
+import os
+import unittest
+import json
+import tempfile
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ApiTest(unittest.TestCase):
+ resource_get_called = 0
+ resource_post_called = 0
+ resource_del_called = 0
+
+ @mozhttpd.handlers.json_response
+ def resource_get(self, request, objid):
+ self.resource_get_called += 1
+ return (200, {'called': self.resource_get_called,
+ 'id': objid,
+ 'query': request.query})
+
+ @mozhttpd.handlers.json_response
+ def resource_post(self, request):
+ self.resource_post_called += 1
+ return (201, {'called': self.resource_post_called,
+ 'data': json.loads(request.body),
+ 'query': request.query})
+
+ @mozhttpd.handlers.json_response
+ def resource_del(self, request, objid):
+ self.resource_del_called += 1
+ return (200, {'called': self.resource_del_called,
+ 'id': objid,
+ 'query': request.query})
+
+ def get_url(self, path, server_port, querystr):
+ url = "http://127.0.0.1:%s%s" % (server_port, path)
+ if querystr:
+ url += "?%s" % querystr
+ return url
+
+ def try_get(self, server_port, querystr):
+ self.resource_get_called = 0
+
+ f = urllib2.urlopen(self.get_url('/api/resource/1', server_port, querystr))
+ try:
+ self.assertEqual(f.getcode(), 200)
+ except AttributeError:
+ pass # python 2.4
+ self.assertEqual(json.loads(f.read()), {'called': 1, 'id': str(1), 'query': querystr})
+ self.assertEqual(self.resource_get_called, 1)
+
+ def try_post(self, server_port, querystr):
+ self.resource_post_called = 0
+
+ postdata = {'hamburgers': '1234'}
+ try:
+ f = urllib2.urlopen(self.get_url('/api/resource/', server_port, querystr),
+ data=json.dumps(postdata))
+ except urllib2.HTTPError as e:
+ # python 2.4
+ self.assertEqual(e.code, 201)
+ body = e.fp.read()
+ else:
+ self.assertEqual(f.getcode(), 201)
+ body = f.read()
+ self.assertEqual(json.loads(body), {'called': 1,
+ 'data': postdata,
+ 'query': querystr})
+ self.assertEqual(self.resource_post_called, 1)
+
+ def try_del(self, server_port, querystr):
+ self.resource_del_called = 0
+
+ opener = urllib2.build_opener(urllib2.HTTPHandler)
+ request = urllib2.Request(self.get_url('/api/resource/1', server_port, querystr))
+ request.get_method = lambda: 'DEL'
+ f = opener.open(request)
+
+ try:
+ self.assertEqual(f.getcode(), 200)
+ except AttributeError:
+ pass # python 2.4
+ self.assertEqual(json.loads(f.read()), {'called': 1, 'id': str(1), 'query': querystr})
+ self.assertEqual(self.resource_del_called, 1)
+
+ def test_api(self):
+ httpd = mozhttpd.MozHttpd(port=0,
+ urlhandlers=[{'method': 'GET',
+ 'path': '/api/resource/([^/]+)/?',
+ 'function': self.resource_get},
+ {'method': 'POST',
+ 'path': '/api/resource/?',
+ 'function': self.resource_post},
+ {'method': 'DEL',
+ 'path': '/api/resource/([^/]+)/?',
+ 'function': self.resource_del}
+ ])
+ httpd.start(block=False)
+
+ server_port = httpd.httpd.server_port
+
+ # GET
+ self.try_get(server_port, '')
+ self.try_get(server_port, '?foo=bar')
+
+ # POST
+ self.try_post(server_port, '')
+ self.try_post(server_port, '?foo=bar')
+
+ # DEL
+ self.try_del(server_port, '')
+ self.try_del(server_port, '?foo=bar')
+
+ # GET: By default we don't serve any files if we just define an API
+ exception_thrown = False
+ try:
+ urllib2.urlopen(self.get_url('/', server_port, None))
+ except urllib2.HTTPError as e:
+ self.assertEqual(e.code, 404)
+ exception_thrown = True
+ self.assertTrue(exception_thrown)
+
+ def test_nonexistent_resources(self):
+ # Create a server without a docroot or url handlers, so requests
+ # return 404 instead of falling back to serving local files
+ httpd = mozhttpd.MozHttpd(port=0)
+ httpd.start(block=False)
+ server_port = httpd.httpd.server_port
+
+ # GET: Return 404 for non-existent endpoint
+ exception_thrown = False
+ try:
+ urllib2.urlopen(self.get_url('/api/resource/', server_port, None))
+ except urllib2.HTTPError as e:
+ self.assertEqual(e.code, 404)
+ exception_thrown = True
+ self.assertTrue(exception_thrown)
+
+ # POST: POST should also return 404
+ exception_thrown = False
+ try:
+ urllib2.urlopen(self.get_url('/api/resource/', server_port, None),
+ data=json.dumps({}))
+ except urllib2.HTTPError as e:
+ self.assertEqual(e.code, 404)
+ exception_thrown = True
+ self.assertTrue(exception_thrown)
+
+ # DEL: DEL should also return 404
+ exception_thrown = False
+ try:
+ opener = urllib2.build_opener(urllib2.HTTPHandler)
+ request = urllib2.Request(self.get_url('/api/resource/', server_port,
+ None))
+ request.get_method = lambda: 'DEL'
+ opener.open(request)
+ except urllib2.HTTPError as e:
+ self.assertEqual(e.code, 404)
+ exception_thrown = True
+ self.assertTrue(exception_thrown)
+
+ def test_api_with_docroot(self):
+ httpd = mozhttpd.MozHttpd(port=0, docroot=here,
+ urlhandlers=[{'method': 'GET',
+ 'path': '/api/resource/([^/]+)/?',
+ 'function': self.resource_get}])
+ httpd.start(block=False)
+ server_port = httpd.httpd.server_port
+
+ # We defined a docroot, so we expect a directory listing
+ f = urllib2.urlopen(self.get_url('/', server_port, None))
+ try:
+ self.assertEqual(f.getcode(), 200)
+ except AttributeError:
+ pass # python 2.4
+ self.assertTrue('Directory listing for' in f.read())
+
+ # Make sure API methods still work
+ self.try_get(server_port, '')
+ self.try_get(server_port, '?foo=bar')
+
+
+class ProxyTest(unittest.TestCase):
+
+ def tearDown(self):
+ # reset proxy opener in case it changed
+ urllib2.install_opener(None)
+
+ def test_proxy(self):
+ docroot = tempfile.mkdtemp()
+ self.addCleanup(mozfile.remove, docroot)
+ hosts = ('mozilla.com', 'mozilla.org')
+ unproxied_host = 'notmozilla.org'
+
+ def url(host): return 'http://%s/' % host
+
+ index_filename = 'index.html'
+
+ def index_contents(host): return '%s index' % host
+
+ index = file(os.path.join(docroot, index_filename), 'w')
+ index.write(index_contents('*'))
+ index.close()
+
+ httpd = mozhttpd.MozHttpd(port=0, docroot=docroot)
+ httpd.start(block=False)
+ server_port = httpd.httpd.server_port
+
+ proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
+ server_port})
+ urllib2.install_opener(urllib2.build_opener(proxy_support))
+
+ for host in hosts:
+ f = urllib2.urlopen(url(host))
+ try:
+ self.assertEqual(f.getcode(), 200)
+ except AttributeError:
+ pass # python 2.4
+ self.assertEqual(f.read(), index_contents('*'))
+
+ httpd.stop()
+
+ # test separate directories per host
+
+ httpd = mozhttpd.MozHttpd(port=0, docroot=docroot, proxy_host_dirs=True)
+ httpd.start(block=False)
+ server_port = httpd.httpd.server_port
+
+ proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
+ server_port})
+ urllib2.install_opener(urllib2.build_opener(proxy_support))
+
+ # set up dirs
+ for host in hosts:
+ os.mkdir(os.path.join(docroot, host))
+ file(os.path.join(docroot, host, index_filename), 'w') \
+ .write(index_contents(host))
+
+ for host in hosts:
+ f = urllib2.urlopen(url(host))
+ try:
+ self.assertEqual(f.getcode(), 200)
+ except AttributeError:
+ pass # python 2.4
+ self.assertEqual(f.read(), index_contents(host))
+
+ exc = None
+ try:
+ urllib2.urlopen(url(unproxied_host))
+ except urllib2.HTTPError as e:
+ exc = e
+ self.assertNotEqual(exc, None)
+ self.assertEqual(exc.code, 404)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozhttpd/tests/baseurl.py b/testing/mozbase/mozhttpd/tests/baseurl.py
new file mode 100644
index 000000000..0e971e6b2
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/baseurl.py
@@ -0,0 +1,19 @@
+import mozhttpd
+import unittest
+
+
+class BaseUrlTest(unittest.TestCase):
+
+ def test_base_url(self):
+ httpd = mozhttpd.MozHttpd(port=0)
+ self.assertEqual(httpd.get_url(), None)
+ httpd.start(block=False)
+ self.assertEqual("http://127.0.0.1:%s/" % httpd.httpd.server_port,
+ httpd.get_url())
+ self.assertEqual("http://127.0.0.1:%s/cheezburgers.html" %
+ httpd.httpd.server_port,
+ httpd.get_url(path="/cheezburgers.html"))
+ httpd.stop()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozhttpd/tests/basic.py b/testing/mozbase/mozhttpd/tests/basic.py
new file mode 100644
index 000000000..8d64b4332
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/basic.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+import mozhttpd
+import mozfile
+import os
+import tempfile
+import unittest
+
+
+class TestBasic(unittest.TestCase):
+ """ Test basic Mozhttpd capabilites """
+
+ def test_basic(self):
+ """ Test mozhttpd can serve files """
+
+ tempdir = tempfile.mkdtemp()
+
+ # sizes is a dict of the form: name -> [size, binary_string, filepath]
+ sizes = {'small': [128], 'large': [16384]}
+
+ for k in sizes.keys():
+ # Generate random binary string
+ sizes[k].append(os.urandom(sizes[k][0]))
+
+ # Add path of file with binary string to list
+ fpath = os.path.join(tempdir, k)
+ sizes[k].append(fpath)
+
+ # Write binary string to file
+ with open(fpath, 'wb') as f:
+ f.write(sizes[k][1])
+
+ server = mozhttpd.MozHttpd(docroot=tempdir)
+ server.start()
+ server_url = server.get_url()
+
+ # Retrieve each file and check that its contents match
+ for k in sizes.keys():
+ retrieved_content = mozfile.load(server_url + k).read()
+ self.assertEqual(retrieved_content, sizes[k][1])
+
+ # Cleanup tempdir and related files
+ mozfile.rmtree(tempdir)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozhttpd/tests/filelisting.py b/testing/mozbase/mozhttpd/tests/filelisting.py
new file mode 100644
index 000000000..6abea757f
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/filelisting.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozhttpd
+import urllib2
+import os
+import unittest
+import re
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class FileListingTest(unittest.TestCase):
+
+ def check_filelisting(self, path=''):
+ filelist = os.listdir(here)
+
+ httpd = mozhttpd.MozHttpd(port=0, docroot=here)
+ httpd.start(block=False)
+ f = urllib2.urlopen("http://%s:%s/%s" % ('127.0.0.1', httpd.httpd.server_port, path))
+ for line in f.readlines():
+ webline = re.sub('\<[a-zA-Z0-9\-\_\.\=\"\'\/\\\%\!\@\#\$\^\&\*\(\) ]*\>',
+ '', line.strip('\n')).strip('/').strip().strip('@')
+
+ if webline and not webline.startswith("Directory listing for"):
+ self.assertTrue(webline in filelist,
+ "File %s in dir listing corresponds to a file" % webline)
+ filelist.remove(webline)
+ self.assertFalse(
+ filelist, "Should have no items in filelist (%s) unaccounted for" % filelist)
+
+ def test_filelist(self):
+ self.check_filelisting()
+
+ def test_filelist_params(self):
+ self.check_filelisting('?foo=bar&fleem=&foo=fleem')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozhttpd/tests/manifest.ini b/testing/mozbase/mozhttpd/tests/manifest.ini
new file mode 100644
index 000000000..3f3d42d9b
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/manifest.ini
@@ -0,0 +1,6 @@
+[api.py]
+[baseurl.py]
+[basic.py]
+[filelisting.py]
+[paths.py]
+[requestlog.py]
diff --git a/testing/mozbase/mozhttpd/tests/paths.py b/testing/mozbase/mozhttpd/tests/paths.py
new file mode 100644
index 000000000..45ae40144
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/paths.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from mozfile import TemporaryDirectory
+import mozhttpd
+import os
+import unittest
+import urllib2
+
+
+class PathTest(unittest.TestCase):
+
+ def try_get(self, url, expected_contents):
+ f = urllib2.urlopen(url)
+ self.assertEqual(f.getcode(), 200)
+ self.assertEqual(f.read(), expected_contents)
+
+ def try_get_expect_404(self, url):
+ with self.assertRaises(urllib2.HTTPError) as cm:
+ urllib2.urlopen(url)
+ self.assertEqual(404, cm.exception.code)
+
+ def test_basic(self):
+ """Test that requests to docroot and a path mapping work as expected."""
+ with TemporaryDirectory() as d1, TemporaryDirectory() as d2:
+ open(os.path.join(d1, "test1.txt"), "w").write("test 1 contents")
+ open(os.path.join(d2, "test2.txt"), "w").write("test 2 contents")
+ httpd = mozhttpd.MozHttpd(port=0,
+ docroot=d1,
+ path_mappings={'/files': d2}
+ )
+ httpd.start(block=False)
+ self.try_get(httpd.get_url("/test1.txt"), "test 1 contents")
+ self.try_get(httpd.get_url("/files/test2.txt"), "test 2 contents")
+ self.try_get_expect_404(httpd.get_url("/files/test2_nope.txt"))
+ httpd.stop()
+
+ def test_substring_mappings(self):
+ """Test that a path mapping that's a substring of another works."""
+ with TemporaryDirectory() as d1, TemporaryDirectory() as d2:
+ open(os.path.join(d1, "test1.txt"), "w").write("test 1 contents")
+ open(os.path.join(d2, "test2.txt"), "w").write("test 2 contents")
+ httpd = mozhttpd.MozHttpd(port=0,
+ path_mappings={'/abcxyz': d1,
+ '/abc': d2, }
+ )
+ httpd.start(block=False)
+ self.try_get(httpd.get_url("/abcxyz/test1.txt"), "test 1 contents")
+ self.try_get(httpd.get_url("/abc/test2.txt"), "test 2 contents")
+ httpd.stop()
+
+ def test_multipart_path_mapping(self):
+ """Test that a path mapping with multiple directories works."""
+ with TemporaryDirectory() as d1:
+ open(os.path.join(d1, "test1.txt"), "w").write("test 1 contents")
+ httpd = mozhttpd.MozHttpd(port=0,
+ path_mappings={'/abc/def/ghi': d1}
+ )
+ httpd.start(block=False)
+ self.try_get(httpd.get_url("/abc/def/ghi/test1.txt"), "test 1 contents")
+ self.try_get_expect_404(httpd.get_url("/abc/test1.txt"))
+ self.try_get_expect_404(httpd.get_url("/abc/def/test1.txt"))
+ httpd.stop()
+
+ def test_no_docroot(self):
+ """Test that path mappings with no docroot work."""
+ with TemporaryDirectory() as d1:
+ httpd = mozhttpd.MozHttpd(port=0,
+ path_mappings={'/foo': d1})
+ httpd.start(block=False)
+ self.try_get_expect_404(httpd.get_url())
+ httpd.stop()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozhttpd/tests/requestlog.py b/testing/mozbase/mozhttpd/tests/requestlog.py
new file mode 100644
index 000000000..bf2c59ec3
--- /dev/null
+++ b/testing/mozbase/mozhttpd/tests/requestlog.py
@@ -0,0 +1,41 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozhttpd
+import urllib2
+import os
+import unittest
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class RequestLogTest(unittest.TestCase):
+
+ def check_logging(self, log_requests=False):
+
+ httpd = mozhttpd.MozHttpd(port=0, docroot=here, log_requests=log_requests)
+ httpd.start(block=False)
+ url = "http://%s:%s/" % ('127.0.0.1', httpd.httpd.server_port)
+ f = urllib2.urlopen(url)
+ f.read()
+
+ return httpd.request_log
+
+ def test_logging_enabled(self):
+ request_log = self.check_logging(log_requests=True)
+
+ self.assertEqual(len(request_log), 1)
+
+ log_entry = request_log[0]
+ self.assertEqual(log_entry['method'], 'GET')
+ self.assertEqual(log_entry['path'], '/')
+ self.assertEqual(type(log_entry['time']), float)
+
+ def test_logging_disabled(self):
+ request_log = self.check_logging(log_requests=False)
+
+ self.assertEqual(len(request_log), 0)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozinfo/mozinfo/__init__.py b/testing/mozbase/mozinfo/mozinfo/__init__.py
new file mode 100644
index 000000000..7d0483cb5
--- /dev/null
+++ b/testing/mozbase/mozinfo/mozinfo/__init__.py
@@ -0,0 +1,60 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+"""
+interface to transform introspected system information to a format palatable to
+Mozilla
+
+Module variables:
+
+.. attribute:: bits
+
+ 32 or 64
+
+.. attribute:: isBsd
+
+ Returns ``True`` if the operating system is BSD
+
+.. attribute:: isLinux
+
+ Returns ``True`` if the operating system is Linux
+
+.. attribute:: isMac
+
+ Returns ``True`` if the operating system is Mac
+
+.. attribute:: isWin
+
+ Returns ``True`` if the operating system is Windows
+
+.. attribute:: os
+
+ Operating system [``'win'``, ``'mac'``, ``'linux'``, ...]
+
+.. attribute:: processor
+
+ Processor architecture [``'x86'``, ``'x86_64'``, ``'ppc'``, ...]
+
+.. attribute:: version
+
+ Operating system version string. For Windows, the service pack information is also included
+
+.. attribute:: info
+
+ Returns information identifying the current system.
+
+ * :attr:`bits`
+ * :attr:`os`
+ * :attr:`processor`
+ * :attr:`version`
+
+"""
+
+from . import mozinfo
+from .mozinfo import *
+
+__all__ = mozinfo.__all__
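As a quick illustration of the attributes documented above (a sketch, not part of the patch):

    import mozinfo

    print mozinfo.os, mozinfo.version, mozinfo.bits, mozinfo.processor
    if mozinfo.isLinux or mozinfo.isBsd:
        print "running on a unix-like system"

    # mozinfo.info is a plain dict of the same values and can be extended at runtime
    mozinfo.update({'foo': 'bar'})
    print mozinfo.info['foo']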
diff --git a/testing/mozbase/mozinfo/mozinfo/mozinfo.py b/testing/mozbase/mozinfo/mozinfo/mozinfo.py
new file mode 100755
index 000000000..81a30307d
--- /dev/null
+++ b/testing/mozbase/mozinfo/mozinfo/mozinfo.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# TODO: it might be a good idea to add a system name (e.g. 'Ubuntu' for
+# Linux) to the information; I certainly wouldn't want anyone parsing this
+# information and having behaviour depend on it
+
+from __future__ import absolute_import
+
+import os
+import platform
+import re
+import sys
+from .string_version import StringVersion
+
+
+# keep a copy of the os module since updating globals overrides this
+_os = os
+
+
+class unknown(object):
+ """marker class for unknown information"""
+
+ def __nonzero__(self):
+ return False
+
+ def __str__(self):
+ return 'UNKNOWN'
+unknown = unknown() # singleton
+
+
+def get_windows_version():
+ import ctypes
+
+ class OSVERSIONINFOEXW(ctypes.Structure):
+ _fields_ = [('dwOSVersionInfoSize', ctypes.c_ulong),
+ ('dwMajorVersion', ctypes.c_ulong),
+ ('dwMinorVersion', ctypes.c_ulong),
+ ('dwBuildNumber', ctypes.c_ulong),
+ ('dwPlatformId', ctypes.c_ulong),
+ ('szCSDVersion', ctypes.c_wchar * 128),
+ ('wServicePackMajor', ctypes.c_ushort),
+ ('wServicePackMinor', ctypes.c_ushort),
+ ('wSuiteMask', ctypes.c_ushort),
+ ('wProductType', ctypes.c_byte),
+ ('wReserved', ctypes.c_byte)]
+
+ os_version = OSVERSIONINFOEXW()
+ os_version.dwOSVersionInfoSize = ctypes.sizeof(os_version)
+ retcode = ctypes.windll.Ntdll.RtlGetVersion(ctypes.byref(os_version))
+ if retcode != 0:
+ raise OSError
+
+ return os_version.dwMajorVersion, os_version.dwMinorVersion, os_version.dwBuildNumber
+
+# get system information
+info = {'os': unknown,
+ 'processor': unknown,
+ 'version': unknown,
+ 'os_version': unknown,
+ 'bits': unknown,
+ 'has_sandbox': unknown}
+(system, node, release, version, machine, processor) = platform.uname()
+(bits, linkage) = platform.architecture()
+
+# get os information and related data
+if system in ["Microsoft", "Windows"]:
+ info['os'] = 'win'
+ # There is a Python bug on Windows when determining platform values
+ # http://bugs.python.org/issue7860
+ if "PROCESSOR_ARCHITEW6432" in os.environ:
+ processor = os.environ.get("PROCESSOR_ARCHITEW6432", processor)
+ else:
+ processor = os.environ.get('PROCESSOR_ARCHITECTURE', processor)
+ system = os.environ.get("OS", system).replace('_', ' ')
+ (major, minor, _, _, service_pack) = os.sys.getwindowsversion()
+ info['service_pack'] = service_pack
+ if major >= 6 and minor >= 2:
+ # On windows >= 8.1 the system call that getwindowsversion uses has
+ # been frozen to always return the same values. In this case we call
+ # the RtlGetVersion API directly, which still provides meaningful
+ # values, at least for now.
+ major, minor, build_number = get_windows_version()
+ version = "%d.%d.%d" % (major, minor, build_number)
+
+ os_version = "%d.%d" % (major, minor)
+elif system.startswith('MINGW'):
+ # windows/mingw python build (msys)
+ info['os'] = 'win'
+ os_version = version = unknown
+elif system == "Linux":
+ if hasattr(platform, "linux_distribution"):
+ (distro, os_version, codename) = platform.linux_distribution()
+ else:
+ (distro, os_version, codename) = platform.dist()
+ if not processor:
+ processor = machine
+ version = "%s %s" % (distro, os_version)
+
+ # Bug in Python 2's `platform` library:
+ # It will return a triple of empty strings if the distribution is not supported.
+ # It works on Python 3. If we don't have an OS version,
+ # the unit tests fail to run.
+ if not distro and not os_version and not codename:
+ distro = 'lfs'
+ version = release
+ os_version = release
+
+ info['os'] = 'linux'
+ info['linux_distro'] = distro
+elif system in ['DragonFly', 'FreeBSD', 'NetBSD', 'OpenBSD']:
+ info['os'] = 'bsd'
+ version = os_version = sys.platform
+elif system == "Darwin":
+ (release, versioninfo, machine) = platform.mac_ver()
+ version = "OS X %s" % release
+ versionNums = release.split('.')[:2]
+ os_version = "%s.%s" % (versionNums[0], versionNums[1])
+ info['os'] = 'mac'
+elif sys.platform in ('solaris', 'sunos5'):
+ info['os'] = 'unix'
+ os_version = version = sys.platform
+else:
+ os_version = version = unknown
+
+info['version'] = version
+info['os_version'] = StringVersion(os_version)
+
+# processor type and bits
+if processor in ["i386", "i686"]:
+ if bits == "32bit":
+ processor = "x86"
+ elif bits == "64bit":
+ processor = "x86_64"
+elif processor.upper() == "AMD64":
+ bits = "64bit"
+ processor = "x86_64"
+elif processor == "Power Macintosh":
+ processor = "ppc"
+bits = re.search('(\d+)bit', bits).group(1)
+info.update({'processor': processor,
+ 'bits': int(bits),
+ })
+
+if info['os'] == 'linux':
+ import ctypes
+ import errno
+ PR_SET_SECCOMP = 22
+ SECCOMP_MODE_FILTER = 2
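+ # Detect seccomp-bpf support: prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0) with a
+ # null filter pointer fails with EFAULT when the kernel supports filter-mode seccomp
+ # (and with a different errno otherwise), without actually enabling seccomp.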
+ ctypes.CDLL("libc.so.6", use_errno=True).prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0)
+ info['has_sandbox'] = ctypes.get_errno() == errno.EFAULT
+else:
+ info['has_sandbox'] = True
+
+# standard value of choices, for easy inspection
+choices = {'os': ['linux', 'bsd', 'win', 'mac', 'unix'],
+ 'bits': [32, 64],
+ 'processor': ['x86', 'x86_64', 'ppc']}
+
+
+def sanitize(info):
+ """Do some sanitization of input values, primarily
+ to handle universal Mac builds."""
+ if "processor" in info and info["processor"] == "universal-x86-x86_64":
+ # If we're running on OS X 10.6 or newer, assume 64-bit
+ if release[:4] >= "10.6": # Note this is a string comparison
+ info["processor"] = "x86_64"
+ info["bits"] = 64
+ else:
+ info["processor"] = "x86"
+ info["bits"] = 32
+
+# method for updating information
+
+
+def update(new_info):
+ """
+ Update the info.
+
+ :param new_info: Either a dict containing the new info or a path/url
+ to a json file containing the new info.
+ """
+
+ if isinstance(new_info, basestring):
+ # lazy import
+ import mozfile
+ import json
+ f = mozfile.load(new_info)
+ new_info = json.loads(f.read())
+ f.close()
+
+ info.update(new_info)
+ sanitize(info)
+ globals().update(info)
+
+ # convenience data for os access
+ for os_name in choices['os']:
+ globals()['is' + os_name.title()] = info['os'] == os_name
+ # unix is special
+ if isLinux or isBsd: # noqa
+ globals()['isUnix'] = True
+
+
+def find_and_update_from_json(*dirs):
+ """
+ Find a mozinfo.json file, load it, and update the info with the
+ contents.
+
+ :param dirs: Directories in which to look for the file. They will be
+ searched after first looking in the root of the objdir
+ if the current script is being run from a Mozilla objdir.
+
+ Returns the full path to mozinfo.json if it was found, or None otherwise.
+ """
+ # First, see if we're in an objdir
+ try:
+ from mozbuild.base import MozbuildObject, BuildEnvironmentNotFoundException
+ build = MozbuildObject.from_environment()
+ json_path = _os.path.join(build.topobjdir, "mozinfo.json")
+ if _os.path.isfile(json_path):
+ update(json_path)
+ return json_path
+ except ImportError:
+ pass
+ except BuildEnvironmentNotFoundException:
+ pass
+
+ for d in dirs:
+ d = _os.path.abspath(d)
+ json_path = _os.path.join(d, "mozinfo.json")
+ if _os.path.isfile(json_path):
+ update(json_path)
+ return json_path
+
+ return None
+
+
+def output_to_file(path):
+ import json
+ with open(path, 'w') as f:
+ f.write(json.dumps(info))
+
+update({})
+
+# exports
+__all__ = info.keys()
+__all__ += ['is' + os_name.title() for os_name in choices['os']]
+__all__ += [
+ 'info',
+ 'unknown',
+ 'main',
+ 'choices',
+ 'update',
+ 'find_and_update_from_json',
+ 'output_to_file',
+ 'StringVersion',
+]
+
+
+def main(args=None):
+
+ # parse the command line
+ from optparse import OptionParser
+ parser = OptionParser(description=__doc__)
+ for key in choices:
+ parser.add_option('--%s' % key, dest=key,
+ action='store_true', default=False,
+ help="display choices for %s" % key)
+ options, args = parser.parse_args()
+
+ # args are JSON blobs to override info
+ if args:
+ # lazy import
+ import json
+ for arg in args:
+ if _os.path.exists(arg):
+ string = file(arg).read()
+ else:
+ string = arg
+ update(json.loads(string))
+
+ # print out choices if requested
+ flag = False
+ for key, value in options.__dict__.items():
+ if value is True:
+ print '%s choices: %s' % (key, ' '.join([str(choice)
+ for choice in choices[key]]))
+ flag = True
+ if flag:
+ return
+
+ # otherwise, print out all info
+ for key, value in info.items():
+ print '%s: %s' % (key, value)
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/mozbase/mozinfo/mozinfo/string_version.py b/testing/mozbase/mozinfo/mozinfo/string_version.py
new file mode 100644
index 000000000..fd77fa566
--- /dev/null
+++ b/testing/mozbase/mozinfo/mozinfo/string_version.py
@@ -0,0 +1,43 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from distutils.version import LooseVersion
+
+
+class StringVersion(str):
+ """
+ A string version that can be compared with comparison operators.
+ """
+
+ def __init__(self, vstring):
+ str.__init__(self, vstring)
+ self.version = LooseVersion(vstring)
+
+ def __repr__(self):
+ return "StringVersion ('%s')" % self
+
+ def __to_version(self, other):
+ if not isinstance(other, StringVersion):
+ other = StringVersion(other)
+ return other.version
+
+ # rich comparison methods
+
+ def __lt__(self, other):
+ return self.version < self.__to_version(other)
+
+ def __le__(self, other):
+ return self.version <= self.__to_version(other)
+
+ def __eq__(self, other):
+ return self.version == self.__to_version(other)
+
+ def __ne__(self, other):
+ return self.version != self.__to_version(other)
+
+ def __gt__(self, other):
+ return self.version > self.__to_version(other)
+
+ def __ge__(self, other):
+ return self.version >= self.__to_version(other)
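A short comparison sketch, mirroring the behaviour exercised later in mozinfo/tests/test.py:

    import mozinfo

    v = mozinfo.StringVersion('10.10')
    assert v > '10.2'      # compared as versions, not as plain strings
    assert v == '10.10'
    assert v < '11.8.5'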
diff --git a/testing/mozbase/mozinfo/setup.py b/testing/mozbase/mozinfo/setup.py
new file mode 100644
index 000000000..3e76b9db4
--- /dev/null
+++ b/testing/mozbase/mozinfo/setup.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = '0.9'
+
+# dependencies
+deps = ['mozfile >= 0.12']
+
+setup(name='mozinfo',
+ version=PACKAGE_VERSION,
+ description="Library to get system information for use in Mozilla testing",
+ long_description="see http://mozbase.readthedocs.org",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozinfo'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozinfo = mozinfo:main
+ """,
+ )
diff --git a/testing/mozbase/mozinfo/tests/manifest.ini b/testing/mozbase/mozinfo/tests/manifest.ini
new file mode 100644
index 000000000..528fdea7b
--- /dev/null
+++ b/testing/mozbase/mozinfo/tests/manifest.ini
@@ -0,0 +1 @@
+[test.py]
diff --git a/testing/mozbase/mozinfo/tests/test.py b/testing/mozbase/mozinfo/tests/test.py
new file mode 100644
index 000000000..b9457cff9
--- /dev/null
+++ b/testing/mozbase/mozinfo/tests/test.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import mock
+import os
+import shutil
+import sys
+import tempfile
+import unittest
+import mozinfo
+
+
+class TestMozinfo(unittest.TestCase):
+
+ def setUp(self):
+ reload(mozinfo)
+ self.tempdir = os.path.abspath(tempfile.mkdtemp())
+
+ # When running from an objdir mozinfo will use a build generated json file
+ # instead of the ones created for testing. Prevent that from happening.
+ # See bug 896038 for details.
+ sys.modules['mozbuild'] = None
+
+ def tearDown(self):
+ shutil.rmtree(self.tempdir)
+ del sys.modules['mozbuild']
+
+ def test_basic(self):
+ """Test that mozinfo has a few attributes."""
+ self.assertNotEqual(mozinfo.os, None)
+ # should have isFoo == True where os == "foo"
+ self.assertTrue(getattr(mozinfo, "is" + mozinfo.os[0].upper() + mozinfo.os[1:]))
+
+ def test_update(self):
+ """Test that mozinfo.update works."""
+ mozinfo.update({"foo": 123})
+ self.assertEqual(mozinfo.info["foo"], 123)
+
+ def test_update_file(self):
+ """Test that mozinfo.update can load a JSON file."""
+ j = os.path.join(self.tempdir, "mozinfo.json")
+ with open(j, "w") as f:
+ f.write(json.dumps({"foo": "xyz"}))
+ mozinfo.update(j)
+ self.assertEqual(mozinfo.info["foo"], "xyz")
+
+ def test_update_file_invalid_json(self):
+ """Test that mozinfo.update handles invalid JSON correctly"""
+ j = os.path.join(self.tempdir, 'test.json')
+ with open(j, 'w') as f:
+ f.write('invalid{"json":')
+ self.assertRaises(ValueError, mozinfo.update, [j])
+
+ def test_find_and_update_file(self):
+ """Test that mozinfo.find_and_update_from_json can
+ find mozinfo.json in a directory passed to it."""
+ j = os.path.join(self.tempdir, "mozinfo.json")
+ with open(j, "w") as f:
+ f.write(json.dumps({"foo": "abcdefg"}))
+ self.assertEqual(mozinfo.find_and_update_from_json(self.tempdir), j)
+ self.assertEqual(mozinfo.info["foo"], "abcdefg")
+
+ def test_find_and_update_file_invalid_json(self):
+ """Test that mozinfo.find_and_update_from_json can
+ handle invalid JSON"""
+ j = os.path.join(self.tempdir, "mozinfo.json")
+ with open(j, 'w') as f:
+ f.write('invalid{"json":')
+ self.assertRaises(ValueError, mozinfo.find_and_update_from_json, self.tempdir)
+
+ def test_find_and_update_file_mozbuild(self):
+ """Test that mozinfo.find_and_update_from_json can
+ find mozinfo.json using the mozbuild module."""
+ j = os.path.join(self.tempdir, "mozinfo.json")
+ with open(j, "w") as f:
+ f.write(json.dumps({"foo": "123456"}))
+ m = mock.MagicMock()
+ # Mock the value of MozbuildObject.from_environment().topobjdir.
+ m.MozbuildObject.from_environment.return_value.topobjdir = self.tempdir
+ with mock.patch.dict(sys.modules, {"mozbuild": m, "mozbuild.base": m}):
+ self.assertEqual(mozinfo.find_and_update_from_json(), j)
+ self.assertEqual(mozinfo.info["foo"], "123456")
+
+ def test_output_to_file(self):
+ """Test that mozinfo.output_to_file works."""
+ path = os.path.join(self.tempdir, "mozinfo.json")
+ mozinfo.output_to_file(path)
+ self.assertEqual(open(path).read(), json.dumps(mozinfo.info))
+
+
+class TestStringVersion(unittest.TestCase):
+
+ def test_os_version_is_a_StringVersion(self):
+ self.assertIsInstance(mozinfo.os_version, mozinfo.StringVersion)
+
+ def test_compare_to_string(self):
+ version = mozinfo.StringVersion('10.10')
+
+ self.assertGreater(version, '10.2')
+ self.assertGreater('11', version)
+ self.assertGreaterEqual(version, '10.10')
+ self.assertGreaterEqual('10.11', version)
+ self.assertEqual(version, '10.10')
+ self.assertEqual('10.10', version)
+ self.assertNotEqual(version, '10.2')
+ self.assertNotEqual('11', version)
+ self.assertLess(version, '11.8.5')
+ self.assertLess('10.2', version)
+ self.assertLessEqual(version, '11')
+ self.assertLessEqual('10.10', version)
+
+ def test_to_string(self):
+ self.assertEqual('10.10', str(mozinfo.StringVersion('10.10')))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozinstall/mozinstall/__init__.py b/testing/mozbase/mozinstall/mozinstall/__init__.py
new file mode 100644
index 000000000..5f96b7fac
--- /dev/null
+++ b/testing/mozbase/mozinstall/mozinstall/__init__.py
@@ -0,0 +1,6 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozinstall import *
diff --git a/testing/mozbase/mozinstall/mozinstall/mozinstall.py b/testing/mozbase/mozinstall/mozinstall/mozinstall.py
new file mode 100755
index 000000000..b4c6f95f7
--- /dev/null
+++ b/testing/mozbase/mozinstall/mozinstall/mozinstall.py
@@ -0,0 +1,342 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from optparse import OptionParser
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import time
+import zipfile
+
+import mozfile
+import mozinfo
+
+try:
+ import pefile
+ has_pefile = True
+except ImportError:
+ has_pefile = False
+
+if mozinfo.isMac:
+ from plistlib import readPlist
+
+
+TIMEOUT_UNINSTALL = 60
+
+
+class InstallError(Exception):
+ """Thrown when installation fails. Includes traceback if available."""
+
+
+class InvalidBinary(Exception):
+ """Thrown when the binary cannot be found after the installation."""
+
+
+class InvalidSource(Exception):
+ """Thrown when the specified source is not a recognized file type.
+
+ Supported types:
+ Linux: tar.gz, tar.bz2
+ Mac: dmg
+ Windows: zip, exe
+
+ """
+
+
+class UninstallError(Exception):
+ """Thrown when uninstallation fails. Includes traceback if available."""
+
+
+def get_binary(path, app_name):
+ """Find the binary in the specified path, and return its path. If binary is
+ not found throw an InvalidBinary exception.
+
+ :param path: Path within to search for the binary
+ :param app_name: Application binary without file extension to look for
+ """
+ binary = None
+
+ # On OS X we can get the real binary from the app bundle
+ if mozinfo.isMac:
+ plist = '%s/Contents/Info.plist' % path
+ if not os.path.isfile(plist):
+ raise InvalidBinary('%s/Contents/Info.plist not found' % path)
+
+ binary = os.path.join(path, 'Contents/MacOS/',
+ readPlist(plist)['CFBundleExecutable'])
+
+ else:
+ app_name = app_name.lower()
+
+ if mozinfo.isWin:
+ app_name = app_name + '.exe'
+
+ for root, dirs, files in os.walk(path):
+ for filename in files:
+ # os.access evaluates to False for some reason, so not using it
+ if filename.lower() == app_name:
+ binary = os.path.realpath(os.path.join(root, filename))
+ break
+
+ if not binary:
+ # The expected binary has not been found.
+ raise InvalidBinary('"%s" does not contain a valid binary.' % path)
+
+ return binary
+
+
+def install(src, dest):
+ """Install a zip, exe, tar.gz, tar.bz2 or dmg file, and return the path of
+ the installation folder.
+
+ :param src: Path to the install file
+ :param dest: Path to install to (to ensure we do not overwrite any existing
+ files, the folder should not exist yet)
+ """
+ src = os.path.realpath(src)
+ dest = os.path.realpath(dest)
+
+ if not is_installer(src):
+ raise InvalidSource(src + ' is not a valid installer file.')
+
+ did_we_create = False
+ if not os.path.exists(dest):
+ did_we_create = True
+ os.makedirs(dest)
+
+ trbk = None
+ try:
+ install_dir = None
+ if src.lower().endswith('.dmg'):
+ install_dir = _install_dmg(src, dest)
+ elif src.lower().endswith('.exe'):
+ install_dir = _install_exe(src, dest)
+ elif zipfile.is_zipfile(src) or tarfile.is_tarfile(src):
+ install_dir = mozfile.extract(src, dest)[0]
+
+ return install_dir
+
+ except:
+ cls, exc, trbk = sys.exc_info()
+ if did_we_create:
+ try:
+ # try to uninstall this properly
+ uninstall(dest)
+ except:
+ # uninstall may fail, let's just try to clean the folder
+ # in this case
+ try:
+ mozfile.remove(dest)
+ except:
+ pass
+ if issubclass(cls, Exception):
+ error = InstallError('Failed to install "%s (%s)"' % (src, str(exc)))
+ raise InstallError, error, trbk
+ # any other kind of exception like KeyboardInterrupt is just re-raised.
+ raise cls, exc, trbk
+
+ finally:
+ # trbk won't get GC'ed due to circular reference
+ # http://docs.python.org/library/sys.html#sys.exc_info
+ del trbk
+
+
+def is_installer(src):
+ """Tests if the given file is a valid installer package.
+
+ Supported types:
+ Linux: tar.gz, tar.bz2
+ Mac: dmg
+ Windows: zip, exe
+
+ On Windows pefile will be used to determine if the executable is the
+ right type, if it is installed on the system.
+
+ :param src: Path to the install file.
+ """
+ src = os.path.realpath(src)
+
+ if not os.path.isfile(src):
+ return False
+
+ if mozinfo.isLinux:
+ return tarfile.is_tarfile(src)
+ elif mozinfo.isMac:
+ return src.lower().endswith('.dmg')
+ elif mozinfo.isWin:
+ if zipfile.is_zipfile(src):
+ return True
+
+ if os.access(src, os.X_OK) and src.lower().endswith('.exe'):
+ if has_pefile:
+ # try to determine if binary is actually a gecko installer
+ pe_data = pefile.PE(src)
+ data = {}
+ for info in getattr(pe_data, 'FileInfo', []):
+ if info.Key == 'StringFileInfo':
+ for string in info.StringTable:
+ data.update(string.entries)
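+ # Installer packages don't carry a BuildID entry in their version
+ # resources, unlike the application binary itself (see the build_stub
+ # check in the tests), so its absence marks a gecko installer.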
+ return 'BuildID' not in data
+ else:
+ # pefile not available, just assume a proper binary was passed in
+ return True
+
+ return False
+
+
+def uninstall(install_folder):
+ """Uninstalls the application in the specified path. If it has been
+ installed via an installer on Windows, use the uninstaller first.
+
+ :param install_folder: Path of the installation folder
+
+ """
+ install_folder = os.path.realpath(install_folder)
+ assert os.path.isdir(install_folder), \
+ 'installation folder "%s" does not exist.' % install_folder
+
+ # On Windows we have to use the uninstaller. If it's not available fallback
+ # to the directory removal code
+ if mozinfo.isWin:
+ uninstall_folder = '%s\uninstall' % install_folder
+ log_file = '%s\uninstall.log' % uninstall_folder
+
+ if os.path.isfile(log_file):
+ trbk = None
+ try:
+ cmdArgs = ['%s\uninstall\helper.exe' % install_folder, '/S']
+ result = subprocess.call(cmdArgs)
+ if result != 0:
+ raise Exception('Execution of uninstaller failed.')
+
+ # The uninstaller spawns another process so the subprocess call
+ # returns immediately. We have to wait until the uninstall
+ # folder has been removed or until we run into a timeout.
+ end_time = time.time() + TIMEOUT_UNINSTALL
+ while os.path.exists(uninstall_folder):
+ time.sleep(1)
+
+ if time.time() > end_time:
+ raise Exception('Failure removing uninstall folder.')
+
+ except Exception, ex:
+ cls, exc, trbk = sys.exc_info()
+ error = UninstallError('Failed to uninstall %s (%s)' % (install_folder, str(ex)))
+ raise UninstallError, error, trbk
+
+ finally:
+ # trbk won't get GC'ed due to circular reference
+ # http://docs.python.org/library/sys.html#sys.exc_info
+ del trbk
+
+ # Ensure that we remove any trace of the installation. Even the uninstaller
+ # on Windows leaves files behind that we have to explicitly remove.
+ mozfile.remove(install_folder)
+
+
+def _install_dmg(src, dest):
+ """Extract a dmg file into the destination folder and return the
+ application folder.
+
+ src -- DMG image which has to be extracted
+ dest -- the path to extract to
+
+ """
+ try:
+ proc = subprocess.Popen('hdiutil attach -nobrowse -noautoopen "%s"' % src,
+ shell=True,
+ stdout=subprocess.PIPE)
+
+ for data in proc.communicate()[0].split():
+ if data.find('/Volumes/') != -1:
+ appDir = data
+ break
+
+ for appFile in os.listdir(appDir):
+ if appFile.endswith('.app'):
+ appName = appFile
+ break
+
+ mounted_path = os.path.join(appDir, appName)
+
+ dest = os.path.join(dest, appName)
+
+ # copytree() would fail if dest already exists.
+ if os.path.exists(dest):
+ raise InstallError('App bundle "%s" already exists.' % dest)
+
+ shutil.copytree(mounted_path, dest, False)
+
+ finally:
+ subprocess.call('hdiutil detach %s -quiet' % appDir,
+ shell=True)
+
+ return dest
+
+
+def _install_exe(src, dest):
+ """Run the MSI installer to silently install the application into the
+ destination folder. Return the folder path.
+
+ Arguments:
+ src -- MSI installer to be executed
+ dest -- the path to install to
+
+ """
+ # The installer doesn't automatically create a subfolder. Let's guess the
+ # best name from the src file name.
+ filename = os.path.basename(src)
+ dest = os.path.join(dest, filename.split('.')[0])
+
+ # possibly gets around UAC in vista (still need to run as administrator)
+ os.environ['__compat_layer'] = 'RunAsInvoker'
+ cmd = '"%s" /extractdir=%s' % (src, os.path.realpath(dest))
+
+ # As long as we support Python 2.4 check_call will not be available.
+ result = subprocess.call(cmd)
+
+ if result != 0:
+ raise Exception('Execution of installer failed.')
+
+ return dest
+
+
+def install_cli(argv=sys.argv[1:]):
+ parser = OptionParser(usage="usage: %prog [options] installer")
+ parser.add_option('-d', '--destination',
+ dest='dest',
+ default=os.getcwd(),
+ help='Directory to install application into. '
+ '[default: "%default"]')
+ parser.add_option('--app', dest='app',
+ default='firefox',
+ help='Application being installed. [default: %default]')
+
+ (options, args) = parser.parse_args(argv)
+ if not len(args) == 1:
+ parser.error('An installer file has to be specified.')
+
+ src = args[0]
+
+ # Run it
+ if os.path.isdir(src):
+ binary = get_binary(src, app_name=options.app)
+ else:
+ install_path = install(src, options.dest)
+ binary = get_binary(install_path, app_name=options.app)
+
+ print binary
+
+
+def uninstall_cli(argv=sys.argv[1:]):
+ parser = OptionParser(usage="usage: %prog install_path")
+
+ (options, args) = parser.parse_args(argv)
+ if not len(args) == 1:
+ parser.error('An installation path has to be specified.')
+
+ # Run it
+ uninstall(argv[0])
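A minimal usage sketch of the API above (the source path is illustrative):

    import mozinstall

    src = 'firefox.tar.bz2'  # any supported installer package
    if mozinstall.is_installer(src):
        install_dir = mozinstall.install(src, 'installed-app')
        binary = mozinstall.get_binary(install_dir, app_name='firefox')
        print binary
        mozinstall.uninstall(install_dir)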
diff --git a/testing/mozbase/mozinstall/setup.py b/testing/mozbase/mozinstall/setup.py
new file mode 100644
index 000000000..7759f0728
--- /dev/null
+++ b/testing/mozbase/mozinstall/setup.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from setuptools import setup
+
+try:
+ here = os.path.dirname(os.path.abspath(__file__))
+ description = file(os.path.join(here, 'README.md')).read()
+except IOError:
+ description = None
+
+PACKAGE_VERSION = '1.12'
+
+deps = ['mozinfo >= 0.7',
+ 'mozfile >= 1.0',
+ ]
+
+setup(name='mozInstall',
+ version=PACKAGE_VERSION,
+ description="package for installing and uninstalling Mozilla applications",
+ long_description="see http://mozbase.readthedocs.org/",
+ # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ classifiers=['Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL 2.0',
+ packages=['mozinstall'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ tests_require=['mozprocess >= 0.15', ],
+ # we have to generate two more executables for systems that cannot run as Administrator,
+ # because a filename containing "install" triggers UAC
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozinstall = mozinstall:install_cli
+ mozuninstall = mozinstall:uninstall_cli
+ moz_add_to_system = mozinstall:install_cli
+ moz_remove_from_system = mozinstall:uninstall_cli
+ """,
+ )
diff --git a/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.dmg b/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.dmg
new file mode 100644
index 000000000..f7f36f631
--- /dev/null
+++ b/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.dmg
Binary files differ
diff --git a/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.tar.bz2 b/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.tar.bz2
new file mode 100644
index 000000000..cb046a0e7
--- /dev/null
+++ b/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.tar.bz2
Binary files differ
diff --git a/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.zip b/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.zip
new file mode 100644
index 000000000..7c3f61a5e
--- /dev/null
+++ b/testing/mozbase/mozinstall/tests/Installer-Stubs/firefox.zip
Binary files differ
diff --git a/testing/mozbase/mozinstall/tests/manifest.ini b/testing/mozbase/mozinstall/tests/manifest.ini
new file mode 100644
index 000000000..528fdea7b
--- /dev/null
+++ b/testing/mozbase/mozinstall/tests/manifest.ini
@@ -0,0 +1 @@
+[test.py]
diff --git a/testing/mozbase/mozinstall/tests/test.py b/testing/mozbase/mozinstall/tests/test.py
new file mode 100644
index 000000000..b4c53bb42
--- /dev/null
+++ b/testing/mozbase/mozinstall/tests/test.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozinfo
+import mozinstall
+import mozfile
+import os
+import tempfile
+import unittest
+
+# Store file location at load time
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestMozInstall(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ """ Setting up stub installers """
+ cls.dmg = os.path.join(here, 'Installer-Stubs', 'firefox.dmg')
+ # XXX: We have removed firefox.exe since it is not valid for mozinstall 1.12 and higher
+ # Bug 1157352 - We should grab a firefox.exe from the build process or download it
+ cls.exe = os.path.join(here, 'Installer-Stubs', 'firefox.exe')
+ cls.zipfile = os.path.join(here, 'Installer-Stubs', 'firefox.zip')
+ cls.bz2 = os.path.join(here, 'Installer-Stubs', 'firefox.tar.bz2')
+
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ mozfile.rmtree(self.tempdir)
+
+ @unittest.skipIf(mozinfo.isWin, "Bug 1157352 - We need a new firefox.exe "
+ "for mozinstall 1.12 and higher.")
+ def test_get_binary(self):
+ """ Test mozinstall's get_binary method """
+
+ if mozinfo.isLinux:
+ installdir = mozinstall.install(self.bz2, self.tempdir)
+ binary = os.path.join(installdir, 'firefox')
+ self.assertEqual(binary, mozinstall.get_binary(installdir, 'firefox'))
+
+ elif mozinfo.isWin:
+ installdir_exe = mozinstall.install(self.exe,
+ os.path.join(self.tempdir, 'exe'))
+ binary_exe = os.path.join(installdir_exe, 'core', 'firefox.exe')
+ self.assertEqual(binary_exe, mozinstall.get_binary(installdir_exe,
+ 'firefox'))
+
+ installdir_zip = mozinstall.install(self.zipfile,
+ os.path.join(self.tempdir, 'zip'))
+ binary_zip = os.path.join(installdir_zip, 'firefox.exe')
+ self.assertEqual(binary_zip, mozinstall.get_binary(installdir_zip,
+ 'firefox'))
+
+ elif mozinfo.isMac:
+ installdir = mozinstall.install(self.dmg, self.tempdir)
+ binary = os.path.join(installdir, 'Contents', 'MacOS', 'firefox')
+ self.assertEqual(binary, mozinstall.get_binary(installdir, 'firefox'))
+
+ def test_get_binary_error(self):
+ """ Test an InvalidBinary error is raised """
+
+ tempdir_empty = tempfile.mkdtemp()
+ self.assertRaises(mozinstall.InvalidBinary, mozinstall.get_binary,
+ tempdir_empty, 'firefox')
+ mozfile.rmtree(tempdir_empty)
+
+ @unittest.skipIf(mozinfo.isWin, "Bug 1157352 - We need a new firefox.exe "
+ "for mozinstall 1.12 and higher.")
+ def test_is_installer(self):
+ """ Test we can identify a correct installer """
+
+ if mozinfo.isLinux:
+ self.assertTrue(mozinstall.is_installer(self.bz2))
+
+ if mozinfo.isWin:
+ # test zip installer
+ self.assertTrue(mozinstall.is_installer(self.zipfile))
+
+ # test exe installer
+ self.assertTrue(mozinstall.is_installer(self.exe))
+
+ try:
+ # test stub browser file
+ # without pefile on the system this test will fail
+ import pefile # noqa
+ stub_exe = os.path.join(here, 'build_stub', 'firefox.exe')
+ self.assertFalse(mozinstall.is_installer(stub_exe))
+ except ImportError:
+ pass
+
+ if mozinfo.isMac:
+ self.assertTrue(mozinstall.is_installer(self.dmg))
+
+ def test_invalid_source_error(self):
+ """ Test InvalidSource error is raised with an incorrect installer """
+
+ if mozinfo.isLinux:
+ self.assertRaises(mozinstall.InvalidSource, mozinstall.install,
+ self.dmg, 'firefox')
+
+ elif mozinfo.isWin:
+ self.assertRaises(mozinstall.InvalidSource, mozinstall.install,
+ self.bz2, 'firefox')
+
+ elif mozinfo.isMac:
+ self.assertRaises(mozinstall.InvalidSource, mozinstall.install,
+ self.bz2, 'firefox')
+
+ @unittest.skipIf(mozinfo.isWin, "Bug 1157352 - We need a new firefox.exe "
+ "for mozinstall 1.12 and higher.")
+ def test_install(self):
+ """ Test mozinstall's install capability """
+
+ if mozinfo.isLinux:
+ installdir = mozinstall.install(self.bz2, self.tempdir)
+ self.assertEqual(os.path.join(self.tempdir, 'firefox'), installdir)
+
+ elif mozinfo.isWin:
+ installdir_exe = mozinstall.install(self.exe,
+ os.path.join(self.tempdir, 'exe'))
+ self.assertEqual(os.path.join(self.tempdir, 'exe', 'firefox'),
+ installdir_exe)
+
+ installdir_zip = mozinstall.install(self.zipfile,
+ os.path.join(self.tempdir, 'zip'))
+ self.assertEqual(os.path.join(self.tempdir, 'zip', 'firefox'),
+ installdir_zip)
+
+ elif mozinfo.isMac:
+ installdir = mozinstall.install(self.dmg, self.tempdir)
+ self.assertEqual(os.path.join(os.path.realpath(self.tempdir),
+ 'FirefoxStub.app'), installdir)
+
+ @unittest.skipIf(mozinfo.isWin, "Bug 1157352 - We need a new firefox.exe "
+ "for mozinstall 1.12 and higher.")
+ def test_uninstall(self):
+ """ Test mozinstall's uninstall capabilites """
+ # Uninstall after installing
+
+ if mozinfo.isLinux:
+ installdir = mozinstall.install(self.bz2, self.tempdir)
+ mozinstall.uninstall(installdir)
+ self.assertFalse(os.path.exists(installdir))
+
+ elif mozinfo.isWin:
+ # Exe installer for Windows
+ installdir_exe = mozinstall.install(self.exe,
+ os.path.join(self.tempdir, 'exe'))
+ mozinstall.uninstall(installdir_exe)
+ self.assertFalse(os.path.exists(installdir_exe))
+
+ # Zip installer for Windows
+ installdir_zip = mozinstall.install(self.zipfile,
+ os.path.join(self.tempdir, 'zip'))
+ mozinstall.uninstall(installdir_zip)
+ self.assertFalse(os.path.exists(installdir_zip))
+
+ elif mozinfo.isMac:
+ installdir = mozinstall.install(self.dmg, self.tempdir)
+ mozinstall.uninstall(installdir)
+ self.assertFalse(os.path.exists(installdir))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozleak/mozleak/__init__.py b/testing/mozbase/mozleak/mozleak/__init__.py
new file mode 100644
index 000000000..ce0c084e0
--- /dev/null
+++ b/testing/mozbase/mozleak/mozleak/__init__.py
@@ -0,0 +1,11 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+mozleak is a library for extracting memory leaks from leak log files.
+"""
+
+from .leaklog import process_leak_log
+
+__all__ = ['process_leak_log']
diff --git a/testing/mozbase/mozleak/mozleak/leaklog.py b/testing/mozbase/mozleak/mozleak/leaklog.py
new file mode 100644
index 000000000..9688974d1
--- /dev/null
+++ b/testing/mozbase/mozleak/mozleak/leaklog.py
@@ -0,0 +1,205 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import os
+import re
+
+
+def _get_default_logger():
+ from mozlog import get_default_logger
+ log = get_default_logger(component='mozleak')
+
+ if not log:
+ import logging
+ log = logging.getLogger(__name__)
+ return log
+
+
+def process_single_leak_file(leakLogFileName, processType, leakThreshold,
+ ignoreMissingLeaks, log=None,
+ stackFixer=None):
+ """Process a single leak log.
+ """
+
+ # | |Per-Inst Leaked| Total Rem|
+ # 0 |TOTAL | 17 192| 419115886 2|
+ # 833 |nsTimerImpl | 60 120| 24726 2|
+ # 930 |Foo<Bar, Bar> | 32 8| 100 1|
+ lineRe = re.compile(r"^\s*\d+ \|"
+ r"(?P<name>[^|]+)\|"
+ r"\s*(?P<size>-?\d+)\s+(?P<bytesLeaked>-?\d+)\s*\|"
+ r"\s*-?\d+\s+(?P<numLeaked>-?\d+)")
+ # The class name can contain spaces. We remove trailing whitespace later.
+
+ log = log or _get_default_logger()
+
+ processString = "%s process:" % processType
+ crashedOnPurpose = False
+ totalBytesLeaked = None
+ leakedObjectAnalysis = []
+ leakedObjectNames = []
+ recordLeakedObjects = False
+ with open(leakLogFileName, "r") as leaks:
+ for line in leaks:
+ if line.find("purposefully crash") > -1:
+ crashedOnPurpose = True
+ matches = lineRe.match(line)
+ if not matches:
+ # eg: the leak table header row
+ strippedLine = line.rstrip()
+ log.info(stackFixer(strippedLine) if stackFixer else strippedLine)
+ continue
+ name = matches.group("name").rstrip()
+ size = int(matches.group("size"))
+ bytesLeaked = int(matches.group("bytesLeaked"))
+ numLeaked = int(matches.group("numLeaked"))
+ # Output the raw line from the leak log table if it is the TOTAL row,
+ # or is for an object row that has been leaked.
+ if numLeaked != 0 or name == "TOTAL":
+ log.info(line.rstrip())
+ # Analyse the leak log, but output later or it will interrupt the
+ # leak table
+ if name == "TOTAL":
+ # Multiple default processes can end up writing their bloat views into a single
+ # log, particularly on B2G. Eventually, these should be split into multiple
+ # logs (bug 1068869), but for now, we report the largest leak.
+ if totalBytesLeaked is not None:
+ log.warning("leakcheck | %s "
+ "multiple BloatView byte totals found"
+ % processString)
+ else:
+ totalBytesLeaked = 0
+ if bytesLeaked > totalBytesLeaked:
+ totalBytesLeaked = bytesLeaked
+ # Throw out the information we had about the previous bloat
+ # view.
+ leakedObjectNames = []
+ leakedObjectAnalysis = []
+ recordLeakedObjects = True
+ else:
+ recordLeakedObjects = False
+ if size < 0 or bytesLeaked < 0 or numLeaked < 0:
+ log.error("TEST-UNEXPECTED-FAIL | leakcheck | %s negative leaks caught!"
+ % processString)
+ continue
+ if name != "TOTAL" and numLeaked != 0 and recordLeakedObjects:
+ leakedObjectNames.append(name)
+ leakedObjectAnalysis.append("TEST-INFO | leakcheck | %s leaked %d %s"
+ % (processString, numLeaked, name))
+
+ log.info('\n'.join(leakedObjectAnalysis))
+
+ if totalBytesLeaked is None:
+ # We didn't see a line with name 'TOTAL'
+ if crashedOnPurpose:
+ log.info("TEST-INFO | leakcheck | %s deliberate crash and thus no leak log"
+ % processString)
+ elif ignoreMissingLeaks:
+ log.info("TEST-INFO | leakcheck | %s ignoring missing output line for total leaks"
+ % processString)
+ else:
+ log.error("TEST-UNEXPECTED-FAIL | leakcheck | %s missing output line for total leaks!"
+ % processString)
+ log.info("TEST-INFO | leakcheck | missing output line from log file %s"
+ % leakLogFileName)
+ return
+
+ if totalBytesLeaked == 0:
+ log.info("TEST-PASS | leakcheck | %s no leaks detected!" %
+ processString)
+ return
+
+ # Create a comma delimited string of the first N leaked objects found,
+ # to aid with bug summary matching in TBPL. Note: The order of the objects
+ # has no significance (they're sorted alphabetically).
+ maxSummaryObjects = 5
+ leakedObjectSummary = ', '.join(leakedObjectNames[:maxSummaryObjects])
+ if len(leakedObjectNames) > maxSummaryObjects:
+ leakedObjectSummary += ', ...'
+
+ message = "leakcheck | %s %d bytes leaked (%s)" % (
+ processString, totalBytesLeaked, leakedObjectSummary)
+
+ # totalBytesLeaked will include any expected leaks, so it can be off
+ # by a few thousand bytes.
+ if totalBytesLeaked > leakThreshold:
+ log.error("TEST-UNEXPECTED-FAIL | %s" % message)
+ else:
+ log.warning(message)
+
+
+def process_leak_log(leak_log_file, leak_thresholds=None,
+ ignore_missing_leaks=None, log=None,
+ stack_fixer=None):
+ """Process the leak log, including separate leak logs created
+ by child processes.
+
+ Use this function if you want an additional PASS/FAIL summary.
+ It must be used with the |XPCOM_MEM_BLOAT_LOG| environment variable.
+
+ The base of leak_log_file for a non-default process needs to end with
+ _proctype_pid12345.log
+ "proctype" is a string denoting the type of the process, which should
+ be the result of calling XRE_ChildProcessTypeToString(). 12345 is
+ a series of digits that is the pid for the process. The .log is
+ optional.
+
+ All other file names are treated as being for default processes.
+
+ leak_thresholds should be a dict mapping process types to leak thresholds,
+ in bytes. If a process type is not present in the dict the threshold
+ will be 0.
+
+ ignore_missing_leaks should be a list of process types. If a process
+ creates a leak log without a TOTAL, then we report an error if it isn't
+ in the list ignore_missing_leaks.
+ """
+
+ log = log or _get_default_logger()
+
+ leakLogFile = leak_log_file
+ if not os.path.exists(leakLogFile):
+ log.warning(
+ "leakcheck | refcount logging is off, so leaks can't be detected!")
+ return
+
+ leakThresholds = leak_thresholds or {}
+ ignoreMissingLeaks = ignore_missing_leaks or []
+
+ # This list is based on kGeckoProcessTypeString. ipdlunittest processes likely
+ # are not going to produce leak logs we will ever see.
+ knownProcessTypes = ["default", "plugin", "tab", "geckomediaplugin", "gpu"]
+
+ for processType in knownProcessTypes:
+ log.info("TEST-INFO | leakcheck | %s process: leak threshold set at %d bytes"
+ % (processType, leakThresholds.get(processType, 0)))
+
+ for processType in leakThresholds:
+ if processType not in knownProcessTypes:
+ log.error("TEST-UNEXPECTED-FAIL | leakcheck | "
+ "Unknown process type %s in leakThresholds" % processType)
+
+ (leakLogFileDir, leakFileBase) = os.path.split(leakLogFile)
+ if leakFileBase[-4:] == ".log":
+ leakFileBase = leakFileBase[:-4]
+ fileNameRegExp = re.compile(r"_([a-z]*)_pid\d*.log$")
+ else:
+ fileNameRegExp = re.compile(r"_([a-z]*)_pid\d*$")
+
+ for fileName in os.listdir(leakLogFileDir):
+ if fileName.find(leakFileBase) != -1:
+ thisFile = os.path.join(leakLogFileDir, fileName)
+ m = fileNameRegExp.search(fileName)
+ if m:
+ processType = m.group(1)
+ else:
+ processType = "default"
+ if processType not in knownProcessTypes:
+ log.error("TEST-UNEXPECTED-FAIL | leakcheck | "
+ "Leak log with unknown process type %s" % processType)
+ leakThreshold = leakThresholds.get(processType, 0)
+ process_single_leak_file(thisFile, processType, leakThreshold,
+ processType in ignoreMissingLeaks,
+ log=log, stackFixer=stack_fixer)
diff --git a/testing/mozbase/mozleak/setup.py b/testing/mozbase/mozleak/setup.py
new file mode 100644
index 000000000..76eb64a9f
--- /dev/null
+++ b/testing/mozbase/mozleak/setup.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+
+PACKAGE_NAME = 'mozleak'
+PACKAGE_VERSION = '0.1'
+
+
+setup(
+ name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+    description="Library for extracting memory leaks from leak log files",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozleak'],
+ zip_safe=False,
+ install_requires=[],
+)
diff --git a/testing/mozbase/mozlog/mozlog/__init__.py b/testing/mozbase/mozlog/mozlog/__init__.py
new file mode 100644
index 000000000..1fe4dc738
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/__init__.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Mozlog aims to standardize log handling and formatting within Mozilla.
+
+It implements a JSON-based structured logging protocol with convenience
+facilities for recording test results.
+
+The old unstructured module is deprecated. It simply wraps Python's
+logging_ module and adds a few convenience methods for logging test
+results and events.
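+
+A minimal sketch of direct use of the structured logger (the logger name and
+test id below are illustrative, not part of any real suite)::
+
+    import sys
+    from mozlog.structuredlog import StructuredLogger
+    from mozlog.handlers import StreamHandler
+    from mozlog.formatters import JSONFormatter
+
+    logger = StructuredLogger("example-suite")
+    logger.add_handler(StreamHandler(stream=sys.stdout, formatter=JSONFormatter()))
+    logger.suite_start(tests=["example-test"])
+    logger.test_start("example-test")
+    logger.test_end("example-test", status="OK")
+    logger.suite_end()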
+"""
+
+import sys
+
+from . import commandline
+from . import structuredlog
+from . import unstructured
+from .structuredlog import get_default_logger, set_default_logger
+from .proxy import get_proxy_logger
+
+# Backwards compatibility shim for consumers that use mozlog.structured
+structured = sys.modules[__name__]
+sys.modules['{}.structured'.format(__name__)] = structured
+
+__all__ = ['commandline', 'structuredlog', 'unstructured',
+ 'get_default_logger', 'set_default_logger', 'get_proxy_logger',
+ 'structured']
diff --git a/testing/mozbase/mozlog/mozlog/commandline.py b/testing/mozbase/mozlog/mozlog/commandline.py
new file mode 100644
index 000000000..107708154
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/commandline.py
@@ -0,0 +1,282 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import optparse
+import os
+import sys
+from collections import defaultdict
+
+from . import handlers
+from . import formatters
+from .structuredlog import StructuredLogger, set_default_logger
+
+log_formatters = {
+ 'raw': (formatters.JSONFormatter, "Raw structured log messages"),
+ 'unittest': (formatters.UnittestFormatter, "Unittest style output"),
+ 'xunit': (formatters.XUnitFormatter, "xUnit compatible XML"),
+ 'html': (formatters.HTMLFormatter, "HTML report"),
+ 'mach': (formatters.MachFormatter, "Human-readable output"),
+ 'tbpl': (formatters.TbplFormatter, "TBPL style log format"),
+ 'errorsummary': (formatters.ErrorSummaryFormatter, argparse.SUPPRESS),
+}
+
+TEXT_FORMATTERS = ('raw', 'mach')
+"""a subset of formatters for non test harnesses related applications"""
+
+
+def level_filter_wrapper(formatter, level):
+ return handlers.LogLevelFilter(formatter, level)
+
+
+def verbose_wrapper(formatter, verbose):
+ formatter.verbose = verbose
+ return formatter
+
+
+def compact_wrapper(formatter, compact):
+ formatter.compact = compact
+ return formatter
+
+
+def buffer_handler_wrapper(handler, buffer_limit):
+ if buffer_limit == "UNLIMITED":
+ buffer_limit = None
+ else:
+ buffer_limit = int(buffer_limit)
+ return handlers.BufferHandler(handler, buffer_limit)
+
+
+def valgrind_handler_wrapper(handler):
+ return handlers.ValgrindHandler(handler)
+
+
+def default_formatter_options(log_type, overrides):
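+    """Return default option values for the given formatter type, merging the
+    per-formatter defaults with any caller-supplied overrides."""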
+ formatter_option_defaults = {
+ "raw": {
+ "level": "debug"
+ }
+ }
+ rv = {"verbose": False,
+ "level": "info"}
+ rv.update(formatter_option_defaults.get(log_type, {}))
+
+ if overrides is not None:
+ rv.update(overrides)
+
+ return rv
+
+fmt_options = {
+ # <option name>: (<wrapper function>, description, <applicable formatters>, action)
+ # "action" is used by the commandline parser in use.
+ 'verbose': (verbose_wrapper,
+ "Enables verbose mode for the given formatter.",
+ ["mach"], "store_true"),
+ 'compact': (compact_wrapper,
+ "Enables compact mode for the given formatter.",
+ ["tbpl"], "store_true"),
+ 'level': (level_filter_wrapper,
+ "A least log level to subscribe to for the given formatter "
+ "(debug, info, error, etc.)",
+ ["mach", "raw", "tbpl"], "store"),
+ 'buffer': (buffer_handler_wrapper,
+ "If specified, enables message buffering at the given buffer size limit.",
+ ["mach", "tbpl"], "store"),
+}
+
+
+def log_file(name):
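+    """Return a stream for the given name: sys.stdout if the name is "-",
+    otherwise the named file opened for writing, creating any missing parent
+    directories first."""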
+ if name == "-":
+ return sys.stdout
+ # ensure we have a correct dirpath by using realpath
+ dirpath = os.path.dirname(os.path.realpath(name))
+ if not os.path.exists(dirpath):
+ os.makedirs(dirpath)
+ return open(name, "w")
+
+
+def add_logging_group(parser, include_formatters=None):
+ """
+ Add logging options to an argparse ArgumentParser or
+ optparse OptionParser.
+
+ Each formatter has a corresponding option of the form --log-{name}
+ where {name} is the name of the formatter. The option takes a value
+ which is either a filename or "-" to indicate stdout.
+
+ :param parser: The ArgumentParser or OptionParser object that should have
+ logging options added.
+ :param include_formatters: List of formatter names that should be included
+                               in the option group. Defaults to None, meaning
+ all the formatters are included. A common use
+ of this option is to specify
+ :data:`TEXT_FORMATTERS` to include only the
+ most useful formatters for a command line tool
+ that is not related to test harnesses.
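+
+    A hypothetical sketch of typical usage with argparse::
+
+        parser = argparse.ArgumentParser()
+        add_logging_group(parser)
+        args = parser.parse_args(["--log-raw", "-"])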
+ """
+ group_name = "Output Logging"
+ group_description = ("Each option represents a possible logging format "
+ "and takes a filename to write that format to, "
+ "or '-' to write to stdout.")
+
+ if include_formatters is None:
+ include_formatters = log_formatters.keys()
+
+ if isinstance(parser, optparse.OptionParser):
+ group = optparse.OptionGroup(parser,
+ group_name,
+ group_description)
+ parser.add_option_group(group)
+ opt_log_type = 'str'
+ group_add = group.add_option
+ else:
+ group = parser.add_argument_group(group_name,
+ group_description)
+ opt_log_type = log_file
+ group_add = group.add_argument
+
+ for name, (cls, help_str) in log_formatters.iteritems():
+ if name in include_formatters:
+ group_add("--log-" + name, action="append", type=opt_log_type,
+ help=help_str)
+
+ for optname, (cls, help_str, formatters_, action) in fmt_options.iteritems():
+ for fmt in formatters_:
+ # make sure fmt is in log_formatters and is accepted
+ if fmt in log_formatters and fmt in include_formatters:
+ group_add("--log-%s-%s" % (fmt, optname), action=action,
+ help=help_str, default=None)
+
+
+def setup_handlers(logger, formatters, formatter_options, allow_unused_options=False):
+ """
+ Add handlers to the given logger according to the formatters and
+ options provided.
+
+ :param logger: The logger configured by this function.
+    :param formatters: A dict of {formatter: [streams]} to use in handlers.
+    :param formatter_options: A dict of {formatter: {option: value}} to use
+                              when configuring formatters.
+ """
+ unused_options = set(formatter_options.keys()) - set(formatters.keys())
+ if unused_options and not allow_unused_options:
+ msg = ("Options specified for unused formatter(s) (%s) have no effect" %
+ list(unused_options))
+ raise ValueError(msg)
+
+ for fmt, streams in formatters.iteritems():
+ formatter_cls = log_formatters[fmt][0]
+ formatter = formatter_cls()
+ handler_wrappers_and_options = []
+
+ for option, value in formatter_options[fmt].iteritems():
+ wrapper, wrapper_args = None, ()
+ if option == "valgrind":
+ wrapper = valgrind_handler_wrapper
+ elif option == "buffer":
+ wrapper, wrapper_args = fmt_options[option][0], (value,)
+ else:
+ formatter = fmt_options[option][0](formatter, value)
+
+ if wrapper is not None:
+ handler_wrappers_and_options.append((wrapper, wrapper_args))
+
+ for value in streams:
+ handler = handlers.StreamHandler(stream=value, formatter=formatter)
+ for wrapper, wrapper_args in handler_wrappers_and_options:
+ handler = wrapper(handler, *wrapper_args)
+ logger.add_handler(handler)
+
+
+def setup_logging(logger, args, defaults=None, formatter_defaults=None,
+ allow_unused_options=False):
+ """
+    Configure a StructuredLogger based on command line arguments.
+
+    The created StructuredLogger will also be set as the default logger, and
+    can be retrieved with :py:func:`~mozlog.get_default_logger`.
+
+ :param logger: A StructuredLogger instance or string name. If a string, a
+ new StructuredLogger instance will be created using
+ `logger` as the name.
+ :param args: A dictionary of {argument_name:value} produced from
+ parsing the command line arguments for the application
+ :param defaults: A dictionary of {formatter name: output stream} to apply
+ when there is no logging supplied on the command line. If
+ this isn't supplied, reasonable defaults are chosen
+ (coloured mach formatting if stdout is a terminal, or raw
+ logs otherwise).
+ :param formatter_defaults: A dictionary of {option_name: default_value} to provide
+ to the formatters in the absence of command line overrides.
+ :rtype: StructuredLogger
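+
+    A hypothetical end-to-end sketch (the harness name is illustrative)::
+
+        parser = argparse.ArgumentParser()
+        add_logging_group(parser)
+        args = parser.parse_args()
+        logger = setup_logging("example-harness", args, defaults={"raw": sys.stdout})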
+ """
+
+ if not isinstance(logger, StructuredLogger):
+ logger = StructuredLogger(logger)
+
+ # Keep track of any options passed for formatters.
+ formatter_options = {}
+ # Keep track of formatters and list of streams specified.
+ formatters = defaultdict(list)
+ found = False
+ found_stdout_logger = False
+ if args is None:
+ args = {}
+ if not hasattr(args, 'iteritems'):
+ args = vars(args)
+
+ if defaults is None:
+ if sys.__stdout__.isatty():
+ defaults = {"mach": sys.stdout}
+ else:
+ defaults = {"raw": sys.stdout}
+
+ for name, values in args.iteritems():
+ parts = name.split('_')
+ if len(parts) > 3:
+ continue
+ # Our args will be ['log', <formatter>]
+ # or ['log', <formatter>, <option>]
+ # or ['valgrind']
+ if parts[0] == 'log' and values is not None:
+ if len(parts) == 1 or parts[1] not in log_formatters:
+ continue
+ if len(parts) == 2:
+ _, formatter = parts
+ for value in values:
+ found = True
+ if isinstance(value, basestring):
+ value = log_file(value)
+ if value == sys.stdout:
+ found_stdout_logger = True
+ formatters[formatter].append(value)
+ if len(parts) == 3:
+ _, formatter, opt = parts
+ if formatter not in formatter_options:
+ formatter_options[formatter] = default_formatter_options(formatter,
+ formatter_defaults)
+ formatter_options[formatter][opt] = values
+
+ # If there is no user-specified logging, go with the default options
+ if not found:
+ for name, value in defaults.iteritems():
+ formatters[name].append(value)
+
+ elif not found_stdout_logger and sys.stdout in defaults.values():
+ for name, value in defaults.iteritems():
+ if value == sys.stdout:
+ formatters[name].append(value)
+
+ for name in formatters:
+ if name not in formatter_options:
+ formatter_options[name] = default_formatter_options(name, formatter_defaults)
+
+ # If the user specified --valgrind, add it as an option for all formatters
+ if args.get('valgrind', None) is not None:
+ for name in formatters:
+ formatter_options[name]['valgrind'] = True
+ setup_handlers(logger, formatters, formatter_options, allow_unused_options)
+ set_default_logger(logger)
+
+ return logger
diff --git a/testing/mozbase/mozlog/mozlog/formatters/__init__.py b/testing/mozbase/mozlog/mozlog/formatters/__init__.py
new file mode 100644
index 000000000..5d37ecda8
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/__init__.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from unittest import UnittestFormatter
+from xunit import XUnitFormatter
+from html import HTMLFormatter
+from machformatter import MachFormatter
+from tbplformatter import TbplFormatter
+from errorsummary import ErrorSummaryFormatter
+
+try:
+ import ujson as json
+except ImportError:
+ import json
+
+
+def JSONFormatter():
+ return lambda x: json.dumps(x) + "\n"
+
+__all__ = ['UnittestFormatter', 'XUnitFormatter', 'HTMLFormatter',
+ 'MachFormatter', 'TbplFormatter', 'ErrorSummaryFormatter',
+ 'JSONFormatter']
diff --git a/testing/mozbase/mozlog/mozlog/formatters/base.py b/testing/mozbase/mozlog/mozlog/formatters/base.py
new file mode 100644
index 000000000..62c079bb2
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/base.py
@@ -0,0 +1,20 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from ..reader import LogHandler
+
+
+class BaseFormatter(LogHandler):
+ """Base class for implementing non-trivial formatters.
+
+ Subclasses are expected to provide a method for each action type they
+ wish to handle, each taking a single argument for the test data.
+ For example a trivial subclass that just produces the id of each test as
+ it starts might be::
+
+        class StartIdFormatter(BaseFormatter):
+            def test_start(self, data):
+                # For simplicity in the example pretend the id is always a string
+                return data["test"]
+ """
diff --git a/testing/mozbase/mozlog/mozlog/formatters/errorsummary.py b/testing/mozbase/mozlog/mozlog/formatters/errorsummary.py
new file mode 100644
index 000000000..5e0e84474
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/errorsummary.py
@@ -0,0 +1,69 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+from base import BaseFormatter
+
+
+class ErrorSummaryFormatter(BaseFormatter):
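+    """Formatter that emits one JSON object per line, recording unexpected
+    test results, ERROR/CRITICAL log messages, crashes and lint issues, each
+    tagged with its action type and its position in the incoming log stream."""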
+
+ def __init__(self):
+ self.line_count = 0
+
+ def __call__(self, data):
+ rv = BaseFormatter.__call__(self, data)
+ self.line_count += 1
+ return rv
+
+ def _output(self, data_type, data):
+ data["action"] = data_type
+ data["line"] = self.line_count
+ return "%s\n" % json.dumps(data)
+
+ def _output_test(self, test, subtest, item):
+ data = {"test": test,
+ "subtest": subtest,
+ "status": item["status"],
+ "expected": item["expected"],
+ "message": item.get("message"),
+ "stack": item.get("stack")}
+ return self._output("test_result", data)
+
+ def test_status(self, item):
+ if "expected" not in item:
+ return
+ return self._output_test(item["test"], item["subtest"], item)
+
+ def test_end(self, item):
+ if "expected" not in item:
+ return
+ return self._output_test(item["test"], None, item)
+
+ def log(self, item):
+ if item["level"] not in ("ERROR", "CRITICAL"):
+ return
+
+ data = {"level": item["level"],
+ "message": item["message"]}
+ return self._output("log", data)
+
+ def crash(self, item):
+ data = {"test": item.get("test"),
+ "signature": item["signature"],
+ "stackwalk_stdout": item.get("stackwalk_stdout"),
+ "stackwalk_stderr": item.get("stackwalk_stderr")}
+ return self._output("crash", data)
+
+ def lint(self, item):
+ data = {
+ "level": item["level"],
+ "path": item["path"],
+ "message": item["message"],
+ "lineno": item["lineno"],
+ "column": item.get("column"),
+ "rule": item.get("rule"),
+ "linter": item.get("linter")
+ }
+ self._output("lint", data)
diff --git a/testing/mozbase/mozlog/mozlog/formatters/html/__init__.py b/testing/mozbase/mozlog/mozlog/formatters/html/__init__.py
new file mode 100644
index 000000000..e607ecb87
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/html/__init__.py
@@ -0,0 +1,3 @@
+from html import HTMLFormatter
+
+__all__ = ['HTMLFormatter']
diff --git a/testing/mozbase/mozlog/mozlog/formatters/html/html.py b/testing/mozbase/mozlog/mozlog/formatters/html/html.py
new file mode 100755
index 000000000..0ec244aa6
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/html/html.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import base64
+import cgi
+from datetime import datetime
+import os
+
+from .. import base
+
+from collections import defaultdict
+
+html = None
+raw = None
+
+base_path = os.path.split(__file__)[0]
+
+
+def do_defered_imports():
+ global html
+ global raw
+
+ from .xmlgen import html, raw
+
+
+class HTMLFormatter(base.BaseFormatter):
+ """Formatter that produces a simple HTML-formatted report."""
+
+ def __init__(self):
+ do_defered_imports()
+ self.suite_name = None
+ self.result_rows = []
+ self.test_count = defaultdict(int)
+ self.start_times = {}
+ self.suite_times = {"start": None,
+ "end": None}
+ self.head = None
+ self.env = {}
+
+ def suite_start(self, data):
+ self.suite_times["start"] = data["time"]
+ self.suite_name = data["source"]
+ with open(os.path.join(base_path, "style.css")) as f:
+ self.head = html.head(
+ html.meta(charset="utf-8"),
+ html.title(data["source"]),
+ html.style(raw(f.read())))
+
+ date_format = "%d %b %Y %H:%M:%S"
+ version_info = data.get("version_info")
+ if version_info:
+ self.env["Device identifier"] = version_info.get("device_id")
+ self.env["Device firmware (base)"] = version_info.get("device_firmware_version_base")
+ self.env["Device firmware (date)"] = (
+ datetime.utcfromtimestamp(int(version_info.get("device_firmware_date")))
+ .strftime(date_format) if
+ "device_firmware_date" in version_info else None)
+ self.env["Device firmware (incremental)"] = version_info.get(
+ "device_firmware_version_incremental")
+ self.env["Device firmware (release)"] = version_info.get(
+ "device_firmware_version_release")
+ self.env["Gaia date"] = (
+ datetime.utcfromtimestamp(int(version_info.get("gaia_date")))
+ .strftime(date_format) if
+ "gaia_date" in version_info else None)
+ self.env["Gecko version"] = version_info.get("application_version")
+ self.env["Gecko build"] = version_info.get("application_buildid")
+
+ if version_info.get("application_changeset"):
+ self.env["Gecko revision"] = version_info.get("application_changeset")
+ if version_info.get("application_repository"):
+ self.env["Gecko revision"] = html.a(
+ version_info.get("application_changeset"),
+ href="/".join([version_info.get("application_repository"),
+ version_info.get("application_changeset")]),
+ target="_blank")
+
+ if version_info.get("gaia_changeset"):
+ self.env["Gaia revision"] = html.a(
+ version_info.get("gaia_changeset")[:12],
+ href="https://github.com/mozilla-b2g/gaia/commit/%s" % version_info.get(
+ "gaia_changeset"),
+ target="_blank")
+
+ device_info = data.get("device_info")
+ if device_info:
+ self.env["Device uptime"] = device_info.get("uptime")
+ self.env["Device memory"] = device_info.get("memtotal")
+ self.env["Device serial"] = device_info.get("id")
+
+ def suite_end(self, data):
+ self.suite_times["end"] = data["time"]
+ return self.generate_html()
+
+ def test_start(self, data):
+ self.start_times[data["test"]] = data["time"]
+
+ def test_end(self, data):
+ self.make_result_html(data)
+
+ def make_result_html(self, data):
+ tc_time = (data["time"] - self.start_times.pop(data["test"])) / 1000.
+ additional_html = []
+ debug = data.get("extra", {})
+ # Add support for log exported from wptrunner. The structure of
+ # reftest_screenshots is listed in wptrunner/executors/base.py.
+ if debug.get('reftest_screenshots'):
+ log_data = debug.get("reftest_screenshots", {})
+ debug = {
+                'image1': 'data:image/png;base64,' + log_data[0].get("screenshot", ""),
+                'image2': 'data:image/png;base64,' + log_data[2].get("screenshot", ""),
+ 'differences': "Not Implemented",
+ }
+
+ links_html = []
+
+ status = status_name = data["status"]
+ expected = data.get("expected", status)
+
+ if status != expected:
+ status_name = "UNEXPECTED_" + status
+ elif status not in ("PASS", "SKIP"):
+ status_name = "EXPECTED_" + status
+
+ self.test_count[status_name] += 1
+
+ if status in ['SKIP', 'FAIL', 'ERROR']:
+ if debug.get('differences'):
+ images = [
+ ('image1', 'Image 1 (test)'),
+ ('image2', 'Image 2 (reference)')
+ ]
+ for title, description in images:
+ screenshot = '%s' % debug[title]
+ additional_html.append(html.div(
+ html.a(html.img(src=screenshot), href="#"),
+ html.br(),
+ html.a(description),
+ class_='screenshot'))
+
+ if debug.get('screenshot'):
+ screenshot = '%s' % debug['screenshot']
+ screenshot = 'data:image/png;base64,' + screenshot
+
+ additional_html.append(html.div(
+ html.a(html.img(src=screenshot), href="#"),
+ class_='screenshot'))
+
+ for name, content in debug.items():
+ if name in ['screenshot', 'image1', 'image2']:
+ if not content.startswith('data:image/png;base64,'):
+ href = 'data:image/png;base64,%s' % content
+ else:
+ href = content
+ else:
+                    # Encode as base64 to prevent some browsers (such as
+                    # Firefox and Opera) from treating '#' as the start of
+                    # another link if it appears in the data URL.
+                    # Use 'charset=utf-8' so non-ASCII characters (e.g. Chinese)
+                    # display correctly.
+ utf_encoded = unicode(content).encode('utf-8', 'xmlcharrefreplace')
+ href = 'data:text/html;charset=utf-8;base64,%s' % base64.b64encode(utf_encoded)
+
+ links_html.append(html.a(
+ name.title(),
+ class_=name,
+ href=href,
+ target='_blank'))
+ links_html.append(' ')
+
+ log = html.div(class_='log')
+ output = data.get('stack', '').splitlines()
+ output.extend(data.get('message', '').splitlines())
+ for line in output:
+ separator = line.startswith(' ' * 10)
+ if separator:
+ log.append(line[:80])
+ else:
+ if line.lower().find("error") != -1 or line.lower().find("exception") != -1:
+ log.append(html.span(raw(cgi.escape(line)), class_='error'))
+ else:
+ log.append(raw(cgi.escape(line)))
+ log.append(html.br())
+ additional_html.append(log)
+
+ self.result_rows.append(
+ html.tr([html.td(status_name, class_='col-result'),
+ html.td(data['test'], class_='col-name'),
+ html.td('%.2f' % tc_time, class_='col-duration'),
+ html.td(links_html, class_='col-links'),
+ html.td(additional_html, class_='debug')],
+ class_=status_name.lower() + ' results-table-row'))
+
+ def generate_html(self):
+ generated = datetime.utcnow()
+ with open(os.path.join(base_path, "main.js")) as main_f:
+ doc = html.html(
+ self.head,
+ html.body(
+ html.script(raw(main_f.read())),
+ html.p('Report generated on %s at %s' % (
+ generated.strftime('%d-%b-%Y'),
+ generated.strftime('%H:%M:%S'))),
+ html.h2('Environment'),
+ html.table(
+ [html.tr(html.td(k), html.td(v))
+ for k, v in sorted(self.env.items()) if v],
+ id='environment'),
+
+ html.h2('Summary'),
+ html.p('%i tests ran in %.1f seconds.' % (sum(self.test_count.itervalues()),
+ (self.suite_times["end"] -
+ self.suite_times["start"]) / 1000.),
+ html.br(),
+ html.span('%i passed' % self.test_count["PASS"], class_='pass'), ', ',
+ html.span('%i skipped' % self.test_count["SKIP"], class_='skip'), ', ',
+ html.span('%i failed' % self.test_count[
+ "UNEXPECTED_FAIL"], class_='fail'), ', ',
+ html.span('%i errors' % self.test_count[
+ "UNEXPECTED_ERROR"], class_='error'), '.',
+ html.br(),
+ html.span('%i expected failures' % self.test_count["EXPECTED_FAIL"],
+ class_='expected_fail'), ', ',
+ html.span('%i unexpected passes' % self.test_count["UNEXPECTED_PASS"],
+ class_='unexpected_pass'), '.'),
+ html.h2('Results'),
+ html.table([html.thead(
+ html.tr([
+ html.th('Result', class_='sortable', col='result'),
+ html.th('Test', class_='sortable', col='name'),
+ html.th('Duration', class_='sortable numeric', col='duration'),
+ html.th('Links')]), id='results-table-head'),
+ html.tbody(self.result_rows,
+ id='results-table-body')], id='results-table')))
+
+ return u"<!DOCTYPE html>\n" + doc.unicode(indent=2)
diff --git a/testing/mozbase/mozlog/mozlog/formatters/html/main.js b/testing/mozbase/mozlog/mozlog/formatters/html/main.js
new file mode 100644
index 000000000..8b4a40ed4
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/html/main.js
@@ -0,0 +1,172 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+function toArray(iter) {
+ if (iter === null) {
+ return null;
+ }
+ return Array.prototype.slice.call(iter);
+}
+
+function find(selector, elem) {
+ if (!elem) {
+ elem = document;
+ }
+ return elem.querySelector(selector);
+}
+
+function find_all(selector, elem) {
+ if (!elem) {
+ elem = document;
+ }
+ return toArray(elem.querySelectorAll(selector));
+}
+
+addEventListener("DOMContentLoaded", function() {
+ reset_sort_headers();
+
+ split_debug_onto_two_rows();
+
+ find_all('.col-links a.screenshot').forEach(function(elem) {
+ elem.addEventListener("click",
+ function(event) {
+ var node = elem;
+ while (node && !node.classList.contains('results-table-row')) {
+ node = node.parentNode;
+ }
+ if (node != null) {
+ if (node.nextSibling &&
+ node.nextSibling.classList.contains("debug")) {
+ var href = find('.screenshot img', node.nextSibling).src;
+ window.open(href);
+ }
+ }
+ event.preventDefault();
+ }, false)
+ });
+
+ find_all('.screenshot a').forEach(function(elem) {
+ elem.addEventListener("click",
+ function(event) {
+ window.open(find('img', elem).getAttribute('src'));
+ event.preventDefault();
+ }, false)
+ });
+
+ find_all('.sortable').forEach(function(elem) {
+ elem.addEventListener("click",
+ function(event) {
+ toggle_sort_states(elem);
+ var colIndex = toArray(elem.parentNode.childNodes).indexOf(elem);
+ var key = elem.classList.contains('numeric') ? key_num : key_alpha;
+ sort_table(elem, key(colIndex));
+ }, false)
+ });
+
+});
+
+function sort_table(clicked, key_func) {
+ one_row_for_data();
+ var rows = find_all('.results-table-row');
+ var reversed = !clicked.classList.contains('asc');
+
+ var sorted_rows = sort(rows, key_func, reversed);
+
+ var parent = document.getElementById('results-table-body');
+ sorted_rows.forEach(function(elem) {
+ parent.appendChild(elem);
+ });
+
+ split_debug_onto_two_rows();
+}
+
+function sort(items, key_func, reversed) {
+ var sort_array = items.map(function(item, i) {
+ return [key_func(item), i];
+ });
+ var multiplier = reversed ? -1 : 1;
+
+ sort_array.sort(function(a, b) {
+ var key_a = a[0];
+ var key_b = b[0];
+ return multiplier * (key_a >= key_b ? 1 : -1);
+ });
+
+ return sort_array.map(function(item) {
+ var index = item[1];
+ return items[index];
+ });
+}
+
+function key_alpha(col_index) {
+ return function(elem) {
+ return elem.childNodes[col_index].firstChild.data.toLowerCase();
+ };
+}
+
+function key_num(col_index) {
+ return function(elem) {
+ return parseFloat(elem.childNodes[col_index].firstChild.data);
+ };
+}
+
+function reset_sort_headers() {
+ find_all('.sort-icon').forEach(function(elem) {
+ elem.parentNode.removeChild(elem);
+ });
+ find_all('.sortable').forEach(function(elem) {
+ var icon = document.createElement("div");
+ icon.className = "sort-icon";
+ icon.textContent = "vvv";
+ elem.insertBefore(icon, elem.firstChild);
+ elem.classList.remove("desc", "active");
+ elem.classList.add("asc", "inactive");
+ });
+}
+
+function toggle_sort_states(elem) {
+ //if active, toggle between asc and desc
+ if (elem.classList.contains('active')) {
+ elem.classList.toggle('asc');
+ elem.classList.toggle('desc');
+ }
+
+  //if inactive, reset all other sort headers and make this one active ascending
+ if (elem.classList.contains('inactive')) {
+ reset_sort_headers();
+ elem.classList.remove('inactive');
+ elem.classList.add('active');
+ }
+}
+
+function split_debug_onto_two_rows() {
+ find_all('tr.results-table-row').forEach(function(elem) {
+ var new_row = document.createElement("tr")
+ new_row.className = "debug";
+ elem.parentNode.insertBefore(new_row, elem.nextSibling);
+ find_all(".debug", elem).forEach(function (td_elem) {
+ if (find(".log", td_elem)) {
+ new_row.appendChild(td_elem);
+ td_elem.colSpan=5;
+ } else {
+ td_elem.parentNode.removeChild(td_elem);
+ }
+ });
+ });
+}
+
+function one_row_for_data() {
+ find_all('tr.results-table-row').forEach(function(elem) {
+ if (elem.nextSibling.classList.contains('debug')) {
+ toArray(elem.nextSibling.childNodes).forEach(
+ function (td_elem) {
+ elem.appendChild(td_elem);
+ })
+ } else {
+ var new_td = document.createElement("td");
+ new_td.className = "debug";
+ elem.appendChild(new_td);
+ }
+ });
+}
diff --git a/testing/mozbase/mozlog/mozlog/formatters/html/style.css b/testing/mozbase/mozlog/mozlog/formatters/html/style.css
new file mode 100644
index 000000000..50609b40a
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/html/style.css
@@ -0,0 +1,154 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+body {
+ font-family: Helvetica, Arial, sans-serif;
+ font-size: 12px;
+ min-width: 1200px;
+ color: #999;
+}
+h2 {
+ font-size: 16px;
+ color: black;
+}
+
+p {
+ color: black;
+}
+
+a {
+ color: #999;
+}
+
+table {
+ border-collapse: collapse;
+}
+
+/******************************
+ * SUMMARY INFORMATION
+ ******************************/
+
+#environment td {
+ padding: 5px;
+ border: 1px solid #E6E6E6;
+}
+
+#environment tr:nth-child(odd) {
+ background-color: #f6f6f6;
+}
+
+/******************************
+ * TEST RESULT COLORS
+ ******************************/
+span.pass, .pass .col-result {
+ color: green;
+}
+span.expected_fail, .expected_fail .col-result,
+span.expected_skip, .expected_skip .col-result,
+span.skip, .skip .col-result {
+ color: orange;
+}
+span.error, .error .col-result,
+span.fail, .fail .col-result,
+span.unexpected_error, .unexpected_error .col-result,
+span.unexpected_fail, .unexpected_fail .col-result,
+span.unexpected_pass, .unexpected_pass .col-result {
+ color: red;
+}
+
+/******************************
+ * RESULTS TABLE
+ *
+ * 1. Table Layout
+ * 2. Debug
+ * 3. Sorting items
+ *
+ ******************************/
+
+/*------------------
+ * 1. Table Layout
+ *------------------*/
+
+#results-table {
+ border: 1px solid #e6e6e6;
+ color: #999;
+ font-size: 12px;
+ width: 100%
+}
+
+#results-table th, #results-table td {
+ padding: 5px;
+ border: 1px solid #E6E6E6;
+ text-align: left
+}
+#results-table th {
+ font-weight: bold
+}
+
+/*------------------
+ * 2. Debug
+ *------------------*/
+
+.log:only-child {
+ height: inherit
+}
+.log {
+ background-color: #e6e6e6;
+ border: 1px solid #e6e6e6;
+ color: black;
+ display: block;
+ font-family: "Courier New", Courier, monospace;
+ height: 230px;
+ overflow-y: scroll;
+ padding: 5px;
+ white-space: pre-wrap
+}
+div.screenshot {
+ border: 1px solid #e6e6e6;
+ float: left;
+ margin-left: 5px;
+ height: 220px
+}
+div.screenshot img {
+ height: 220px
+}
+
+/*if the result is passed or xpassed don't show debug row*/
+.passed + .debug, .unexpected.pass + .debug {
+ display: none;
+}
+
+/*------------------
+ * 3. Sorting items
+ *------------------*/
+.sortable {
+ cursor: pointer;
+}
+
+.sort-icon {
+ font-size: 0px;
+ float: left;
+ margin-right: 5px;
+ margin-top: 5px;
+ /*triangle*/
+ width: 0;
+ height: 0;
+ border-left: 8px solid transparent;
+ border-right: 8px solid transparent;
+}
+
+.inactive .sort-icon {
+ /*finish triangle*/
+ border-top: 8px solid #E6E6E6;
+}
+
+.asc.active .sort-icon {
+ /*finish triangle*/
+ border-bottom: 8px solid #999;
+}
+
+.desc.active .sort-icon {
+ /*finish triangle*/
+ border-top: 8px solid #999;
+}
diff --git a/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py b/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py
new file mode 100644
index 000000000..e545e9a7d
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py
@@ -0,0 +1,283 @@
+"""
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+This file is originally from: https://bitbucket.org/hpk42/py, specifically:
+https://bitbucket.org/hpk42/py/src/980c8d526463958ee7cae678a7e4e9b054f36b94/py/_xmlgen.py?at=default
+by holger krekel, holger at merlinux eu. 2009
+"""
+import sys
+import re
+
+if sys.version_info >= (3, 0):
+ def u(s):
+ return s
+
+ def unicode(x):
+ if hasattr(x, '__unicode__'):
+ return x.__unicode__()
+ return str(x)
+else:
+ def u(s):
+ return unicode(s)
+ unicode = unicode
+
+
+class NamespaceMetaclass(type):
+
+ def __getattr__(self, name):
+ if name[:1] == '_':
+ raise AttributeError(name)
+ if self == Namespace:
+ raise ValueError("Namespace class is abstract")
+ tagspec = self.__tagspec__
+ if tagspec is not None and name not in tagspec:
+ raise AttributeError(name)
+ classattr = {}
+ if self.__stickyname__:
+ classattr['xmlname'] = name
+ cls = type(name, (self.__tagclass__,), classattr)
+ setattr(self, name, cls)
+ return cls
+
+
+class Tag(list):
+
+ class Attr(object):
+
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super(Tag, self).__init__(args)
+ self.attr = self.Attr(**kwargs)
+
+ def __unicode__(self):
+ return self.unicode(indent=0)
+ __str__ = __unicode__
+
+ def unicode(self, indent=2):
+ l = []
+ SimpleUnicodeVisitor(l.append, indent).visit(self)
+ return u("").join(l)
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return "<%r tag object %d>" % (name, id(self))
+
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+ '__tagspec__': None,
+ '__tagclass__': Tag,
+ '__stickyname__': False,
+})
+
+
+class HtmlTag(Tag):
+
+ def unicode(self, indent=2):
+ l = []
+ HtmlVisitor(l.append, indent, shortempty=False).visit(self)
+ return u("").join(l)
+
+# exported plain html namespace
+
+
+class html(Namespace):
+ __tagclass__ = HtmlTag
+ __stickyname__ = True
+ __tagspec__ = dict([(x, 1) for x in (
+ 'a,abbr,acronym,address,applet,area,b,bdo,big,blink,'
+ 'blockquote,body,br,button,caption,center,cite,code,col,'
+ 'colgroup,comment,dd,del,dfn,dir,div,dl,dt,em,embed,'
+ 'fieldset,font,form,frameset,h1,h2,h3,h4,h5,h6,head,html,'
+ 'i,iframe,img,input,ins,kbd,label,legend,li,link,listing,'
+ 'map,marquee,menu,meta,multicol,nobr,noembed,noframes,'
+ 'noscript,object,ol,optgroup,option,p,pre,q,s,script,'
+ 'select,small,span,strike,strong,style,sub,sup,table,'
+ 'tbody,td,textarea,tfoot,th,thead,title,tr,tt,u,ul,xmp,'
+ 'base,basefont,frame,hr,isindex,param,samp,var'
+ ).split(',') if x])
+
+ class Style(object):
+
+ def __init__(self, **kw):
+ for x, y in kw.items():
+ x = x.replace('_', '-')
+ setattr(self, x, y)
+
+
+class raw(object):
+ """just a box that can contain a unicode string that will be
+ included directly in the output"""
+
+ def __init__(self, uniobj):
+ self.uniobj = uniobj
+
+
+class SimpleUnicodeVisitor(object):
+ """ recursive visitor to write unicode. """
+
+ def __init__(self, write, indent=0, curindent=0, shortempty=True):
+ self.write = write
+ self.cache = {}
+ self.visited = {} # for detection of recursion
+ self.indent = indent
+ self.curindent = curindent
+ self.parents = []
+ self.shortempty = shortempty # short empty tags or not
+
+ def visit(self, node):
+ """ dispatcher on node's class/bases name. """
+ cls = node.__class__
+ try:
+ visitmethod = self.cache[cls]
+ except KeyError:
+ for subclass in cls.__mro__:
+ visitmethod = getattr(self, subclass.__name__, None)
+ if visitmethod is not None:
+ break
+ else:
+ visitmethod = self.__object
+ self.cache[cls] = visitmethod
+ visitmethod(node)
+
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
+ # self.write(obj)
+ self.write(escape(unicode(obj)))
+
+ def raw(self, obj):
+ self.write(obj.uniobj)
+
+ def list(self, obj):
+ assert id(obj) not in self.visited
+ self.visited[id(obj)] = 1
+ for elem in obj:
+ self.visit(elem)
+
+ def Tag(self, tag):
+ assert id(tag) not in self.visited
+ try:
+ tag.parent = self.parents[-1]
+ except IndexError:
+ tag.parent = None
+ self.visited[id(tag)] = 1
+ tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+ if self.curindent and not self._isinline(tagname):
+ self.write("\n" + u(' ') * self.curindent)
+ if tag:
+ self.curindent += self.indent
+ self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+ self.parents.append(tag)
+ for x in tag:
+ self.visit(x)
+ self.parents.pop()
+ self.write(u('</%s>') % tagname)
+ self.curindent -= self.indent
+ else:
+ nameattr = tagname + self.attributes(tag)
+ if self._issingleton(tagname):
+ self.write(u('<%s/>') % (nameattr,))
+ else:
+ self.write(u('<%s></%s>') % (nameattr, tagname))
+
+ def attributes(self, tag):
+ # serialize attributes
+ attrlist = dir(tag.attr)
+ attrlist.sort()
+ l = []
+ for name in attrlist:
+ res = self.repr_attribute(tag.attr, name)
+ if res is not None:
+ l.append(res)
+ l.extend(self.getstyle(tag))
+ return u("").join(l)
+
+ def repr_attribute(self, attrs, name):
+ if name[:2] != '__':
+ value = getattr(attrs, name)
+ if name.endswith('_'):
+ name = name[:-1]
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
+
+ def getstyle(self, tag):
+ """ return attribute list suitable for styling. """
+ try:
+ styledict = tag.style.__dict__
+ except AttributeError:
+ return []
+ else:
+ stylelist = [x + ': ' + y for x, y in styledict.items()]
+ return [u(' style="%s"') % u('; ').join(stylelist)]
+
+ def _issingleton(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return self.shortempty
+
+ def _isinline(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return False
+
+
+class HtmlVisitor(SimpleUnicodeVisitor):
+
+ single = dict([(x, 1) for x in
+ ('br,img,area,param,col,hr,meta,link,base,'
+ 'input,frame').split(',')])
+ inline = dict([(x, 1) for x in
+ ('a abbr acronym b basefont bdo big br cite code dfn em font '
+ 'i img input kbd label q s samp select small span strike '
+ 'strong sub sup textarea tt u var'.split(' '))])
+
+ def repr_attribute(self, attrs, name):
+ if name == 'class_':
+ value = getattr(attrs, name)
+ if value is None:
+ return
+ return super(HtmlVisitor, self).repr_attribute(attrs, name)
+
+ def _issingleton(self, tagname):
+ return tagname in self.single
+
+ def _isinline(self, tagname):
+ return tagname in self.inline
+
+
+class _escape:
+
+ def __init__(self):
+ self.escape = {
+ u('"'): u('&quot;'), u('<'): u('&lt;'), u('>'): u('&gt;'),
+ u('&'): u('&amp;'), u("'"): u('&apos;'),
+ }
+ self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+ def _replacer(self, match):
+ return self.escape[match.group(0)]
+
+ def __call__(self, ustring):
+ """ xml-escape the given unicode string. """
+ ustring = unicode(ustring)
+ return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()
diff --git a/testing/mozbase/mozlog/mozlog/formatters/machformatter.py b/testing/mozbase/mozlog/mozlog/formatters/machformatter.py
new file mode 100644
index 000000000..f8f1abc25
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/machformatter.py
@@ -0,0 +1,395 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import time
+from collections import defaultdict
+
+try:
+ import blessings
+except ImportError:
+ blessings = None
+
+import base
+from .process import strstatus
+
+
+def format_seconds(total):
+ """Format number of seconds to MM:SS.DD form."""
+ minutes, seconds = divmod(total, 60)
+ return '%2d:%05.2f' % (minutes, seconds)
+
+
+class NullTerminal(object):
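+    """Stand-in for a blessings Terminal that returns text unchanged; used
+    when blessings is unavailable or colours are disabled."""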
+
+ def __getattr__(self, name):
+ return self._id
+
+ def _id(self, value):
+ return value
+
+
+class MachFormatter(base.BaseFormatter):
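+    """Human-readable formatter used for mach output, with optional terminal
+    colours (via blessings, when available) and a per-suite results summary."""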
+
+ def __init__(self, start_time=None, write_interval=False, write_times=True,
+ terminal=None, disable_colors=False):
+
+ if disable_colors:
+ terminal = None
+ elif terminal is None and blessings is not None:
+ terminal = blessings.Terminal()
+
+ if start_time is None:
+ start_time = time.time()
+ start_time = int(start_time * 1000)
+ self.start_time = start_time
+ self.write_interval = write_interval
+ self.write_times = write_times
+ self.status_buffer = {}
+ self.has_unexpected = {}
+ self.last_time = None
+ self.terminal = terminal
+ self.verbose = False
+ self._known_pids = set()
+
+ self.summary_values = {"tests": 0,
+ "subtests": 0,
+ "expected": 0,
+ "unexpected": defaultdict(int),
+ "skipped": 0}
+ self.summary_unexpected = []
+
+ def __call__(self, data):
+ s = base.BaseFormatter.__call__(self, data)
+ if s is None:
+ return
+
+ time = format_seconds(self._time(data))
+ action = data["action"].upper()
+ thread = data["thread"]
+
+ # Not using the NullTerminal here is a small optimisation to cut the number of
+ # function calls
+ if self.terminal is not None:
+ test = self._get_test_id(data)
+
+ time = self.terminal.blue(time)
+
+ color = None
+
+ if data["action"] == "test_end":
+ if "expected" not in data and not self.has_unexpected[test]:
+ color = self.terminal.green
+ else:
+ color = self.terminal.red
+ elif data["action"] in ("suite_start", "suite_end",
+ "test_start", "test_status"):
+ color = self.terminal.yellow
+ elif data["action"] == "crash":
+ color = self.terminal.red
+
+ if color is not None:
+ action = color(action)
+
+ return "%s %s: %s %s\n" % (time, action, thread, s)
+
+ def _get_test_id(self, data):
+ test_id = data.get("test")
+ if isinstance(test_id, list):
+ test_id = tuple(test_id)
+ return test_id
+
+ def _get_file_name(self, test_id):
+ if isinstance(test_id, (str, unicode)):
+ return test_id
+
+ if isinstance(test_id, tuple):
+ return "".join(test_id)
+
+ assert False, "unexpected test_id"
+
+ def suite_start(self, data):
+ self.summary_values = {"tests": 0,
+ "subtests": 0,
+ "expected": 0,
+ "unexpected": defaultdict(int),
+ "skipped": 0}
+ self.summary_unexpected = []
+ return "%i" % len(data["tests"])
+
+ def suite_end(self, data):
+ term = self.terminal if self.terminal is not None else NullTerminal()
+
+ heading = "Summary"
+ rv = ["", heading, "=" * len(heading), ""]
+
+ has_subtests = self.summary_values["subtests"] > 0
+
+ if has_subtests:
+ rv.append("Ran %i tests (%i parents, %i subtests)" %
+ (self.summary_values["tests"] + self.summary_values["subtests"],
+ self.summary_values["tests"],
+ self.summary_values["subtests"]))
+ else:
+ rv.append("Ran %i tests" % self.summary_values["tests"])
+
+ rv.append("Expected results: %i" % self.summary_values["expected"])
+
+ unexpected_count = sum(self.summary_values["unexpected"].values())
+ if unexpected_count > 0:
+ unexpected_str = " (%s)" % ", ".join("%s: %i" % (key, value) for key, value in
+ sorted(self.summary_values["unexpected"].items()))
+ else:
+ unexpected_str = ""
+
+ rv.append("Unexpected results: %i%s" % (unexpected_count, unexpected_str))
+
+ if self.summary_values["skipped"] > 0:
+ rv.append("Skipped: %i" % self.summary_values["skipped"])
+ rv.append("")
+
+ if not self.summary_values["unexpected"]:
+ rv.append(term.green("OK"))
+ else:
+ heading = "Unexpected Results"
+ rv.extend([heading, "=" * len(heading), ""])
+ if has_subtests:
+ for test_id, results in self.summary_unexpected:
+ test = self._get_file_name(test_id)
+ rv.extend([test, "-" * len(test)])
+ for name, status, expected, message in results:
+ if name is None:
+ name = "[Parent]"
+ rv.append("%s %s" % (self.format_expected(status, expected), name))
+ else:
+ for test_id, results in self.summary_unexpected:
+ test = self._get_file_name(test_id)
+ assert len(results) == 1
+                    name, status, expected, message = results[0]
+ assert name is None
+ rv.append("%s %s" % (self.format_expected(status, expected), test))
+
+ return "\n".join(rv)
+
+ def format_expected(self, status, expected):
+ term = self.terminal if self.terminal is not None else NullTerminal()
+ if status == "ERROR":
+ color = term.red
+ else:
+ color = term.yellow
+
+ if expected in ("PASS", "OK"):
+ return color(status)
+
+ return color("%s expected %s" % (status, expected))
+
+ def test_start(self, data):
+ self.summary_values["tests"] += 1
+ return "%s" % (self._get_test_id(data),)
+
+ def test_end(self, data):
+ subtests = self._get_subtest_data(data)
+ unexpected = subtests["unexpected"]
+
+ message = data.get("message", "")
+ if "stack" in data:
+ stack = data["stack"]
+ if stack and stack[-1] != "\n":
+ stack += "\n"
+ message = stack + message
+
+ if "expected" in data:
+ parent_unexpected = True
+ expected_str = ", expected %s" % data["expected"]
+ unexpected.append((None, data["status"], data["expected"],
+ message))
+ else:
+ parent_unexpected = False
+ expected_str = ""
+
+ test = self._get_test_id(data)
+
+ if unexpected:
+ self.summary_unexpected.append((test, unexpected))
+ self._update_summary(data)
+
+ # Reset the counts to 0
+ self.status_buffer[test] = {"count": 0, "unexpected": [], "pass": 0}
+ self.has_unexpected[test] = bool(unexpected)
+
+ if subtests["count"] != 0:
+ rv = "Harness %s%s. Subtests passed %i/%i. Unexpected %s" % (
+ data["status"], expected_str, subtests["pass"], subtests["count"],
+ len(unexpected))
+ else:
+ rv = "%s%s" % (data["status"], expected_str)
+
+ if unexpected:
+ rv += "\n"
+ if len(unexpected) == 1 and parent_unexpected:
+ rv += "%s" % unexpected[0][-1]
+ else:
+ for name, status, expected, message in unexpected:
+ if name is None:
+ name = "[Parent]"
+ expected_str = "Expected %s, got %s" % (expected, status)
+ rv += "%s\n" % ("\n".join([name, "-" * len(name), expected_str, message]))
+ rv = rv[:-1]
+ return rv
+
+ def valgrind_error(self, data):
+ rv = " " + data['primary'] + "\n"
+ for line in data['secondary']:
+ rv = rv + line + "\n"
+
+ return rv
+
+ def test_status(self, data):
+ self.summary_values["subtests"] += 1
+
+ test = self._get_test_id(data)
+ if test not in self.status_buffer:
+ self.status_buffer[test] = {"count": 0, "unexpected": [], "pass": 0}
+ self.status_buffer[test]["count"] += 1
+
+ message = data.get("message", "")
+ if "stack" in data:
+ if message:
+ message += "\n"
+ message += data["stack"]
+
+ if data["status"] == "PASS":
+ self.status_buffer[test]["pass"] += 1
+
+ self._update_summary(data)
+
+ rv = None
+ status, subtest = data["status"], data["subtest"]
+ unexpected = "expected" in data
+ if self.verbose:
+ if self.terminal is not None:
+ status = (self.terminal.red if unexpected else self.terminal.green)(status)
+ rv = " ".join([subtest, status, message])
+ elif unexpected:
+ # We only append an unexpected summary if it was not logged
+ # directly by verbose mode.
+ self.status_buffer[test]["unexpected"].append((subtest,
+ status,
+ data["expected"],
+ message))
+ return rv
+
+ def _update_summary(self, data):
+ if "expected" in data:
+ self.summary_values["unexpected"][data["status"]] += 1
+ elif data["status"] == "SKIP":
+ self.summary_values["skipped"] += 1
+ else:
+ self.summary_values["expected"] += 1
+
+ def process_output(self, data):
+ rv = []
+
+ if "command" in data and data["process"] not in self._known_pids:
+ self._known_pids.add(data["process"])
+ rv.append('(pid:%s) Full command: %s' % (data["process"], data["command"]))
+
+ rv.append('(pid:%s) "%s"' % (data["process"], data["data"]))
+ return "\n".join(rv)
+
+ def crash(self, data):
+ test = self._get_test_id(data)
+
+ if data.get("stackwalk_returncode", 0) != 0 and not data.get("stackwalk_stderr"):
+ success = True
+ else:
+ success = False
+
+ rv = ["pid:%s. Test:%s. Minidump anaylsed:%s. Signature:[%s]" %
+ (data.get("pid", None), test, success, data["signature"])]
+
+ if data.get("minidump_path"):
+ rv.append("Crash dump filename: %s" % data["minidump_path"])
+
+ if data.get("stackwalk_returncode", 0) != 0:
+ rv.append("minidump_stackwalk exited with return code %d" %
+ data["stackwalk_returncode"])
+
+ if data.get("stackwalk_stderr"):
+ rv.append("stderr from minidump_stackwalk:")
+ rv.append(data["stackwalk_stderr"])
+ elif data.get("stackwalk_stdout"):
+ rv.append(data["stackwalk_stdout"])
+
+ if data.get("stackwalk_errors"):
+ rv.extend(data.get("stackwalk_errors"))
+
+ rv = "\n".join(rv)
+ if not rv[-1] == "\n":
+ rv += "\n"
+
+ return rv
+
+ def process_start(self, data):
+ rv = "Started process `%s`" % data['process']
+ desc = data.get('command')
+ if desc:
+ rv = '%s (%s)' % (rv, desc)
+ return rv
+
+ def process_exit(self, data):
+ return "%s: %s" % (data['process'], strstatus(data['exitcode']))
+
+ def log(self, data):
+ level = data.get("level").upper()
+
+ if self.terminal is not None:
+ if level in ("CRITICAL", "ERROR"):
+ level = self.terminal.red(level)
+ elif level == "WARNING":
+ level = self.terminal.yellow(level)
+ elif level == "INFO":
+ level = self.terminal.blue(level)
+
+ if data.get('component'):
+ rv = " ".join([data["component"], level, data["message"]])
+ else:
+ rv = "%s %s" % (level, data["message"])
+
+ if "stack" in data:
+ rv += "\n%s" % data["stack"]
+
+ return rv
+
+ def lint(self, data):
+ term = self.terminal if self.terminal is not None else NullTerminal()
+ fmt = "{path} {c1}{lineno}{column} {c2}{level}{normal} {message}" \
+ " {c1}{rule}({linter}){normal}"
+ message = fmt.format(
+ path=data["path"],
+ normal=term.normal,
+ c1=term.grey,
+ c2=term.red if data["level"] == 'error' else term.yellow,
+ lineno=str(data["lineno"]),
+ column=(":" + str(data["column"])) if data.get("column") else "",
+ level=data["level"],
+ message=data["message"],
+ rule='{} '.format(data["rule"]) if data.get("rule") else "",
+ linter=data["linter"].lower() if data.get("linter") else "",
+ )
+
+ return message
+
+ def _get_subtest_data(self, data):
+ test = self._get_test_id(data)
+ return self.status_buffer.get(test, {"count": 0, "unexpected": [], "pass": 0})
+
+ def _time(self, data):
+ entry_time = data["time"]
+ if self.write_interval and self.last_time is not None:
+ t = entry_time - self.last_time
+ self.last_time = entry_time
+ else:
+ t = entry_time - self.start_time
+
+ return t / 1000.
diff --git a/testing/mozbase/mozlog/mozlog/formatters/process.py b/testing/mozbase/mozlog/mozlog/formatters/process.py
new file mode 100644
index 000000000..46030cdc7
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/process.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import signal
+
+# a dict cache of signal number -> signal name
+_SIG_NAME = None
+
+
+def strsig(n):
+ """
+ Translate a process signal identifier to a human readable string.
+ """
+ global _SIG_NAME
+
+ if _SIG_NAME is None:
+ # cache signal names
+ _SIG_NAME = {}
+ for k in dir(signal):
+ if (k.startswith("SIG")
+ and not k.startswith("SIG_")
+ and k != "SIGCLD" and k != "SIGPOLL"):
+
+ _SIG_NAME[getattr(signal, k)] = k
+
+ # Realtime signals mostly have no names
+ if hasattr(signal, "SIGRTMIN") and hasattr(signal, "SIGRTMAX"):
+ for r in range(signal.SIGRTMIN + 1, signal.SIGRTMAX + 1):
+ _SIG_NAME[r] = "SIGRTMIN+" + str(r - signal.SIGRTMIN)
+
+ if n < 0 or n >= signal.NSIG:
+ return "out-of-range signal, number %s" % n
+ try:
+ return _SIG_NAME[n]
+ except KeyError:
+ return "unrecognized signal, number %s" % n
+
+
+def strstatus(status):
+ """
+ Returns a human readable string of a process exit code, as returned
+ by the subprocess module.
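+
+    For example, strstatus(0) returns "exit 0", while on a typical POSIX
+    system strstatus(-11) returns "killed by SIGSEGV".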
+ """
+ # 'status' is the exit status
+ if os.name != 'posix':
+ # Windows error codes are easier to look up if printed in hexadecimal
+ if status < 0:
+ status += 2**32
+ return "exit %x" % status
+ elif status >= 0:
+ return "exit %d" % status
+ else:
+ return "killed by %s" % strsig(-status)
diff --git a/testing/mozbase/mozlog/mozlog/formatters/tbplformatter.py b/testing/mozbase/mozlog/mozlog/formatters/tbplformatter.py
new file mode 100644
index 000000000..71152d455
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/tbplformatter.py
@@ -0,0 +1,244 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import functools
+from collections import deque
+
+from .base import BaseFormatter
+from .process import strstatus
+
+
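+# Decorator for TbplFormatter methods: if subtest results have been buffered
+# in compact mode, flush them as a line of dots before emitting the wrapped
+# action's output.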
+def output_subtests(func):
+ @functools.wraps(func)
+ def inner(self, data):
+ if self.subtests_count:
+ return self._format_subtests(data.get("component")) + func(self, data)
+ else:
+ return func(self, data)
+ return inner
+
+
+class TbplFormatter(BaseFormatter):
+ """Formatter that formats logs in the legacy formatting format used by TBPL
+ This is intended to be used to preserve backward compatibility with existing tools
+ hand-parsing this format.
+ """
+ def __init__(self, compact=False):
+ self.suite_start_time = None
+ self.test_start_times = {}
+ self.buffer = None
+ self.compact = compact
+ self.subtests_count = 0
+
+ @property
+ def compact(self):
+ return self._compact
+
+ @compact.setter
+ def compact(self, value):
+ self._compact = value
+ if value:
+ self.buffer = deque([], 10)
+ else:
+ self.buffer = None
+
+ def __call__(self, data):
+ return getattr(self, data["action"])(data)
+
+ def _format_subtests(self, component, subtract_context=False):
+ count = self.subtests_count
+ if subtract_context:
+ count -= len(self.buffer)
+ self.subtests_count = 0
+ return self._log({"level": "INFO",
+ "message": "." * count,
+ "component": component})
+
+ @output_subtests
+ def log(self, data):
+ return self._log(data)
+
+ def _log(self, data):
+ if data.get('component'):
+ message = "%s %s" % (data["component"], data["message"])
+ else:
+ message = data["message"]
+
+ if "stack" in data:
+ message += "\n%s" % data["stack"]
+
+ return "%s\n" % message
+
+ @output_subtests
+ def process_output(self, data):
+ return "PROCESS | %(process)s | %(data)s\n" % data
+
+ @output_subtests
+ def process_start(self, data):
+ msg = "TEST-INFO | started process %s" % data['process']
+ if 'command' in data:
+ msg = '%s (%s)' % (msg, data['command'])
+ return msg + '\n'
+
+ @output_subtests
+ def process_exit(self, data):
+ return "TEST-INFO | %s: %s\n" % (data['process'],
+ strstatus(data['exitcode']))
+
+ @output_subtests
+ def crash(self, data):
+ id = data["test"] if "test" in data else "pid: %s" % data["process"]
+
+ signature = data["signature"] if data["signature"] else "unknown top frame"
+ rv = ["PROCESS-CRASH | %s | application crashed [%s]" % (id, signature)]
+
+ if data.get("minidump_path"):
+ rv.append("Crash dump filename: %s" % data["minidump_path"])
+
+ if data.get("stackwalk_stderr"):
+ rv.append("stderr from minidump_stackwalk:")
+ rv.append(data["stackwalk_stderr"])
+ elif data.get("stackwalk_stdout"):
+ rv.append(data["stackwalk_stdout"])
+
+ if data.get("stackwalk_returncode", 0) != 0:
+ rv.append("minidump_stackwalk exited with return code %d" %
+ data["stackwalk_returncode"])
+
+ if data.get("stackwalk_errors"):
+ rv.extend(data.get("stackwalk_errors"))
+
+ rv = "\n".join(rv)
+        if not rv.endswith("\n"):
+ rv += "\n"
+
+ return rv
+
+ def suite_start(self, data):
+ self.suite_start_time = data["time"]
+ return "SUITE-START | Running %i tests\n" % len(data["tests"])
+
+ def test_start(self, data):
+ self.test_start_times[self.test_id(data["test"])] = data["time"]
+
+ return "TEST-START | %s\n" % data["test"]
+
+ def test_status(self, data):
+ if self.compact:
+ if "expected" in data:
+ rv = []
+ rv.append(self._format_subtests(data.get("component"), subtract_context=True))
+ rv.extend(self._format_status(item) for item in self.buffer)
+ rv.append(self._format_status(data))
+ self.buffer.clear()
+ return "".join(rv)
+ else:
+ self.subtests_count += 1
+ self.buffer.append(data)
+ else:
+ return self._format_status(data)
+
+ def _format_status(self, data):
+ message = "- " + data["message"] if "message" in data else ""
+ if "stack" in data:
+ message += "\n%s" % data["stack"]
+ if message and message[-1] == "\n":
+ message = message[:-1]
+
+ if "expected" in data:
+ if not message:
+ message = "- expected %s" % data["expected"]
+ failure_line = "TEST-UNEXPECTED-%s | %s | %s %s\n" % (
+ data["status"], data["test"], data["subtest"],
+ message)
+ if data["expected"] != "PASS":
+ info_line = "TEST-INFO | expected %s\n" % data["expected"]
+ return failure_line + info_line
+ return failure_line
+
+ return "TEST-%s | %s | %s %s\n" % (
+ data["status"], data["test"], data["subtest"],
+ message)
+
+ def test_end(self, data):
+ rv = []
+ if self.compact and self.subtests_count:
+ print_context = "expected" in data
+ rv.append(self._format_subtests(data.get("component"),
+ subtract_context=print_context))
+ if print_context:
+ rv.extend(self._format_status(item) for item in self.buffer)
+ self.buffer.clear()
+
+ test_id = self.test_id(data["test"])
+ duration_msg = ""
+
+ if test_id in self.test_start_times:
+ start_time = self.test_start_times.pop(test_id)
+ time = data["time"] - start_time
+ duration_msg = "took %ims" % time
+
+ if "expected" in data:
+ message = data.get("message", "")
+ if not message:
+ message = "expected %s" % data["expected"]
+ if "stack" in data:
+ message += "\n%s" % data["stack"]
+ if message and message[-1] == "\n":
+ message = message[:-1]
+
+ extra = data.get("extra", {})
+ if "reftest_screenshots" in extra:
+ screenshots = extra["reftest_screenshots"]
+ if len(screenshots) == 3:
+ message += ("\nREFTEST IMAGE 1 (TEST): data:image/png;base64,%s\n"
+ "REFTEST IMAGE 2 (REFERENCE): data:image/png;base64,%s") % (
+ screenshots[0]["screenshot"],
+ screenshots[2]["screenshot"])
+ elif len(screenshots) == 1:
+ message += "\nREFTEST IMAGE: data:image/png;base64,%(image1)s" \
+ % screenshots[0]["screenshot"]
+
+ failure_line = "TEST-UNEXPECTED-%s | %s | %s\n" % (
+ data["status"], test_id, message)
+
+ if data["expected"] not in ("PASS", "OK"):
+ expected_msg = "expected %s | " % data["expected"]
+ else:
+ expected_msg = ""
+ info_line = "TEST-INFO %s%s\n" % (expected_msg, duration_msg)
+
+ return failure_line + info_line
+
+ sections = ["TEST-%s" % data['status'], test_id]
+ if duration_msg:
+ sections.append(duration_msg)
+ rv.append(' | '.join(sections) + '\n')
+ return "".join(rv)
+
+ def suite_end(self, data):
+ start_time = self.suite_start_time
+ time = int((data["time"] - start_time) / 1000)
+
+ return "SUITE-END | took %is\n" % time
+
+ def test_id(self, test_id):
+ if isinstance(test_id, (str, unicode)):
+ return test_id
+ else:
+ return tuple(test_id)
+
+ @output_subtests
+ def valgrind_error(self, data):
+ rv = "TEST-UNEXPECTED-VALGRIND-ERROR | " + data['primary'] + "\n"
+ for line in data['secondary']:
+ rv = rv + line + "\n"
+
+ return rv
+
+ def lint(self, data):
+ fmt = "TEST-UNEXPECTED-{level} | {path}:{lineno}{column} | {message} ({rule})"
+ data["column"] = ":%s" % data["column"] if data["column"] else ""
+ data['rule'] = data['rule'] or data['linter'] or ""
+        return fmt.format(**data) + "\n"
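+
+# Example wiring (a minimal sketch; the logger name "example" and the test ids
+# below are made up for illustration, everything else is defined in this patch):
+#
+#   import sys
+#   from mozlog.structuredlog import StructuredLogger
+#   from mozlog.handlers import StreamHandler
+#   from mozlog.formatters.tbplformatter import TbplFormatter
+#
+#   logger = StructuredLogger("example")
+#   logger.add_handler(StreamHandler(sys.stdout, TbplFormatter()))
+#   logger.suite_start(["test_a"])
+#   logger.test_start("test_a")
+#   logger.test_status("test_a", "subtest 1", "PASS")
+#   logger.test_end("test_a", "OK")
+#   logger.suite_end()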
diff --git a/testing/mozbase/mozlog/mozlog/formatters/unittest.py b/testing/mozbase/mozlog/mozlog/formatters/unittest.py
new file mode 100755
index 000000000..254205eae
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/unittest.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import base
+
+
+class UnittestFormatter(base.BaseFormatter):
+ """Formatter designed to produce output in a format like that used by
+ the ``unittest`` module in the standard library."""
+
+ def __init__(self):
+ self.fails = []
+ self.errors = []
+ self.tests_run = 0
+ self.start_time = None
+ self.end_time = None
+
+ def suite_start(self, data):
+ self.start_time = data["time"]
+
+ def test_start(self, data):
+ self.tests_run += 1
+
+ def test_end(self, data):
+ char = "."
+ if "expected" in data:
+ status = data["status"]
+ char = {"FAIL": "F",
+ "ERROR": "E",
+ "PASS": "X"}[status]
+
+ if status == "FAIL":
+ self.fails.append(data)
+ elif status == "ERROR":
+ self.errors.append(data)
+
+ elif data["status"] == "SKIP":
+ char = "S"
+ return char
+
+ def suite_end(self, data):
+ self.end_time = data["time"]
+ summary = "\n".join([self.output_fails(),
+ self.output_errors(),
+ self.output_summary()])
+ return "\n%s\n" % summary
+
+ def output_fails(self):
+ return "\n".join("FAIL %(test)s\n%(message)s\n" % data
+ for data in self.fails)
+
+ def output_errors(self):
+ return "\n".join("ERROR %(test)s\n%(message)s" % data
+ for data in self.errors)
+
+ def output_summary(self):
+ return ("Ran %i tests in %.1fs" % (self.tests_run,
+ (self.end_time - self.start_time) / 1000))
diff --git a/testing/mozbase/mozlog/mozlog/formatters/xunit.py b/testing/mozbase/mozlog/mozlog/formatters/xunit.py
new file mode 100644
index 000000000..3930afa3e
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/formatters/xunit.py
@@ -0,0 +1,101 @@
+import types
+from xml.etree import ElementTree
+
+import base
+
+
+def format_test_id(test_id):
+ """Take a test id and return something that looks a bit like
+ a class path"""
+ if type(test_id) not in types.StringTypes:
+ # Not sure how to deal with reftests yet
+ raise NotImplementedError
+
+    # Turn a path into something like a class hierarchy
+ return test_id.replace('.', '_').replace('/', ".")
+
+
+class XUnitFormatter(base.BaseFormatter):
+ """Formatter that produces XUnit-style XML output.
+
+ The tree is created in-memory so this formatter may be problematic
+ with very large log files.
+
+ Note that the data model isn't a perfect match. In
+ particular XUnit assumes that each test has a unittest-style
+ class name and function name, which isn't the case for us. The
+ implementation currently replaces path names with something that
+ looks like class names, but this doesn't work for test types that
+ actually produce class names, or for test types that have multiple
+ components in their test id (e.g. reftests)."""
+
+ def __init__(self):
+ self.tree = ElementTree.ElementTree()
+ self.root = None
+ self.suite_start_time = None
+ self.test_start_time = None
+
+ self.tests_run = 0
+ self.errors = 0
+ self.failures = 0
+ self.skips = 0
+
+ def suite_start(self, data):
+ self.root = ElementTree.Element("testsuite")
+ self.tree.root = self.root
+ self.suite_start_time = data["time"]
+
+ def test_start(self, data):
+ self.tests_run += 1
+ self.test_start_time = data["time"]
+
+ def _create_result(self, data):
+ test = ElementTree.SubElement(self.root, "testcase")
+ name = format_test_id(data["test"])
+ extra = data.get('extra') or {}
+ test.attrib["classname"] = extra.get('class_name') or name
+
+ if "subtest" in data:
+ test.attrib["name"] = data["subtest"]
+ # We generally don't know how long subtests take
+ test.attrib["time"] = "0"
+ else:
+ if "." in name:
+ test_name = name.rsplit(".", 1)[1]
+ else:
+ test_name = name
+ test.attrib["name"] = extra.get('method_name') or test_name
+ test.attrib["time"] = "%.2f" % ((data["time"] - self.test_start_time) / 1000.0)
+
+ if ("expected" in data and data["expected"] != data["status"]):
+ if data["status"] in ("NOTRUN", "ASSERT", "ERROR"):
+ result = ElementTree.SubElement(test, "error")
+ self.errors += 1
+ else:
+ result = ElementTree.SubElement(test, "failure")
+ self.failures += 1
+
+ result.attrib["message"] = "Expected %s, got %s" % (data["expected"], data["status"])
+ result.text = '%s\n%s' % (data.get('stack', ''), data.get('message', ''))
+
+ elif data["status"] == "SKIP":
+ result = ElementTree.SubElement(test, "skipped")
+ self.skips += 1
+
+ def test_status(self, data):
+ self._create_result(data)
+
+ def test_end(self, data):
+ self._create_result(data)
+
+ def suite_end(self, data):
+ self.root.attrib.update({"tests": str(self.tests_run),
+ "errors": str(self.errors),
+ "failures": str(self.failures),
+ "skips": str(self.skips),
+ "time": "%.2f" % (
+ (data["time"] - self.suite_start_time) / 1000.0)})
+ xml_string = ElementTree.tostring(self.root, encoding="utf8")
+ # pretty printing can not be done from xml.etree
+ from xml.dom import minidom
+ return minidom.parseString(xml_string).toprettyxml(encoding="utf8")
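+
+# Example wiring (a minimal sketch; "results.xml" is an arbitrary output path).
+# XUnitFormatter only returns output from suite_end, so the XML document is
+# written in one piece when the suite finishes.
+#
+#   from mozlog.structuredlog import StructuredLogger
+#   from mozlog.handlers import StreamHandler
+#   from mozlog.formatters.xunit import XUnitFormatter
+#
+#   logger = StructuredLogger("example")
+#   logger.add_handler(StreamHandler(open("results.xml", "w"), XUnitFormatter()))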
diff --git a/testing/mozbase/mozlog/mozlog/handlers/__init__.py b/testing/mozbase/mozlog/mozlog/handlers/__init__.py
new file mode 100644
index 000000000..7decfc780
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/handlers/__init__.py
@@ -0,0 +1,11 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .base import LogLevelFilter, StreamHandler, BaseHandler
+from .statushandler import StatusHandler
+from .bufferhandler import BufferHandler
+from .valgrindhandler import ValgrindHandler
+
+__all__ = ['LogLevelFilter', 'StreamHandler', 'BaseHandler',
+ 'StatusHandler', 'BufferHandler', 'ValgrindHandler']
diff --git a/testing/mozbase/mozlog/mozlog/handlers/base.py b/testing/mozbase/mozlog/mozlog/handlers/base.py
new file mode 100644
index 000000000..be4df21f9
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/handlers/base.py
@@ -0,0 +1,105 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from threading import Lock
+import codecs
+
+from ..structuredlog import log_levels
+
+
+class BaseHandler(object):
+ """A base handler providing message handling facilities to
+ derived classes.
+
+ :param inner: A handler-like callable that may receive messages
+ from a log user.
+ """
+
+ def __init__(self, inner):
+ self.wrapped = []
+ if hasattr(inner, "handle_message"):
+ self.wrapped.append(inner)
+ self.message_handlers = {}
+
+ def register_message_handlers(self, topic, handlers):
+ self.message_handlers[topic] = handlers
+
+ def handle_message(self, topic, cmd, *args):
+ """Handles a message for the given topic by calling a subclass-defined
+ callback for the command.
+
+ :param topic: The topic of the broadcasted message. Handlers opt-in to
+ receiving messages by identifying a topic when calling
+ register_message_handlers.
+        :param cmd: The command to issue. This is a string that corresponds
+                    to a callback provided by the target.
+        :param args: Arguments to pass to the identified message callback, if any.
+ """
+ rv = []
+ if topic in self.message_handlers and cmd in self.message_handlers[topic]:
+ rv.append(self.message_handlers[topic][cmd](*args))
+ for inner in self.wrapped:
+ rv.extend(inner.handle_message(topic, cmd, *args))
+ return rv
+
+
+class LogLevelFilter(BaseHandler):
+ """Handler that filters out messages with action of log and a level
+ lower than some specified level.
+
+ :param inner: Handler to use for messages that pass this filter
+ :param level: Minimum log level to process
+ """
+
+ def __init__(self, inner, level):
+ BaseHandler.__init__(self, inner)
+ self.inner = inner
+ self.level = log_levels[level.upper()]
+
+ def __call__(self, item):
+ if (item["action"] != "log" or
+ log_levels[item["level"].upper()] <= self.level):
+ return self.inner(item)
+
+
+class StreamHandler(BaseHandler):
+ """Handler for writing to a file-like object
+
+ :param stream: File-like object to write log messages to
+ :param formatter: formatter to convert messages to string format
+ """
+
+ _lock = Lock()
+
+ def __init__(self, stream, formatter):
+ BaseHandler.__init__(self, formatter)
+ assert stream is not None
+ # This is a hack to deal with the case where we are passed a
+ # StreamWriter (e.g. by mach for stdout). A StreamWriter requires
+ # the code to handle unicode in exactly the opposite way compared
+ # to a normal stream i.e. you always have to pass in a Unicode
+ # object rather than a string object. Cope with that by extracting
+ # the underlying raw stream.
+ if isinstance(stream, codecs.StreamWriter):
+ stream = stream.stream
+
+ self.formatter = formatter
+ self.stream = stream
+
+ def __call__(self, data):
+ """Write a log message.
+
+ :param data: Structured log message dictionary."""
+ formatted = self.formatter(data)
+ if not formatted:
+ return
+ with self._lock:
+ if isinstance(formatted, unicode):
+ self.stream.write(formatted.encode("utf-8", "replace"))
+ elif isinstance(formatted, str):
+ self.stream.write(formatted)
+ else:
+ assert False, "Got output from the formatter of an unexpected type"
+
+ self.stream.flush()
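+
+# Example of composing handlers (a minimal sketch; the formatter choice and the
+# "info" threshold are arbitrary): log messages below INFO are dropped, while
+# every non-log action still reaches the wrapped StreamHandler.
+#
+#   import sys
+#   from mozlog.formatters.tbplformatter import TbplFormatter
+#
+#   inner = StreamHandler(sys.stdout, TbplFormatter())
+#   handler = LogLevelFilter(inner, "info")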
diff --git a/testing/mozbase/mozlog/mozlog/handlers/bufferhandler.py b/testing/mozbase/mozlog/mozlog/handlers/bufferhandler.py
new file mode 100644
index 000000000..4cb3e53ce
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/handlers/bufferhandler.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .base import BaseHandler
+
+
+class BufferHandler(BaseHandler):
+ """Handler that maintains a circular buffer of messages based on the
+ size and actions specified by a user.
+
+ :param inner: The underlying handler used to emit messages.
+ :param message_limit: The maximum number of messages to retain for
+ context. If None, the buffer will grow without limit.
+ :param buffered_actions: The set of actions to include in the buffer
+ rather than log directly.
+ """
+
+ def __init__(self, inner, message_limit=100, buffered_actions=None):
+ BaseHandler.__init__(self, inner)
+ self.inner = inner
+ self.message_limit = message_limit
+ if buffered_actions is None:
+ buffered_actions = ['log', 'test_status']
+ self.buffered_actions = set(buffered_actions)
+ self._buffering = True
+
+ if self.message_limit is not None:
+ self._buffer = [None] * self.message_limit
+ self._buffer_pos = 0
+ else:
+ self._buffer = []
+
+ self.register_message_handlers("buffer", {
+ "on": self._enable_buffering,
+ "off": self._disable_buffering,
+ "flush": self._flush_buffered,
+ "clear": self._clear_buffer,
+ })
+
+ def __call__(self, data):
+ action = data['action']
+ if 'bypass_mozlog_buffer' in data:
+ data.pop('bypass_mozlog_buffer')
+ self.inner(data)
+ return
+ if not self._buffering or action not in self.buffered_actions:
+ self.inner(data)
+ return
+
+ self._add_message(data)
+
+ def _add_message(self, data):
+ if self.message_limit is None:
+ self._buffer.append(data)
+ else:
+ self._buffer[self._buffer_pos] = data
+ self._buffer_pos = (self._buffer_pos + 1) % self.message_limit
+
+ def _enable_buffering(self):
+ self._buffering = True
+
+ def _disable_buffering(self):
+ self._buffering = False
+
+ def _clear_buffer(self):
+ """Clear the buffer of unwanted messages."""
+ current_size = len([m for m in self._buffer if m is not None])
+ if self.message_limit is not None:
+ self._buffer = [None] * self.message_limit
+ else:
+ self._buffer = []
+ return current_size
+
+ def _flush_buffered(self):
+ """Logs the contents of the current buffer"""
+ for msg in self._buffer[self._buffer_pos:]:
+ if msg is not None:
+ self.inner(msg)
+ for msg in self._buffer[:self._buffer_pos]:
+ if msg is not None:
+ self.inner(msg)
+ return self._clear_buffer()
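+
+# Example of controlling the buffer from a logger (a minimal sketch; assumes a
+# StructuredLogger and an inner output handler set up elsewhere). The "buffer"
+# topic and commands are the ones registered in __init__ above.
+#
+#   logger.add_handler(BufferHandler(inner_handler, message_limit=50))
+#   ...
+#   logger.send_message("buffer", "flush")   # emit and clear buffered messages
+#   logger.send_message("buffer", "off")     # stop buffering; log directly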
diff --git a/testing/mozbase/mozlog/mozlog/handlers/statushandler.py b/testing/mozbase/mozlog/mozlog/handlers/statushandler.py
new file mode 100644
index 000000000..b5aeb1b53
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/handlers/statushandler.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import (
+ defaultdict,
+ namedtuple,
+)
+
+
+RunSummary = namedtuple("RunSummary",
+ ("unexpected_statuses",
+ "expected_statuses",
+ "log_level_counts",
+ "action_counts"))
+
+
+class StatusHandler(object):
+ """A handler used to determine an overall status for a test run according
+ to a sequence of log messages."""
+
+ def __init__(self):
+ # The count of each type of unexpected result status (includes tests and subtests)
+ self.unexpected_statuses = defaultdict(int)
+ # The count of each type of expected result status (includes tests and subtests)
+ self.expected_statuses = defaultdict(int)
+ # The count of actions logged
+ self.action_counts = defaultdict(int)
+ # The count of messages logged at each log level
+ self.log_level_counts = defaultdict(int)
+
+ def __call__(self, data):
+ action = data['action']
+ self.action_counts[action] += 1
+
+ if action == 'log':
+ self.log_level_counts[data['level']] += 1
+
+ if action in ('test_status', 'test_end'):
+ status = data['status']
+ if 'expected' in data:
+ self.unexpected_statuses[status] += 1
+ else:
+ self.expected_statuses[status] += 1
+
+ def summarize(self):
+ return RunSummary(
+ dict(self.unexpected_statuses),
+ dict(self.expected_statuses),
+ dict(self.log_level_counts),
+ dict(self.action_counts),
+ )
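+
+# Example use (a minimal sketch; assumes a StructuredLogger set up elsewhere):
+# attach the handler alongside any output handlers, then inspect the summary
+# once the run is complete.
+#
+#   status = StatusHandler()
+#   logger.add_handler(status)
+#   ...
+#   summary = status.summarize()
+#   if summary.unexpected_statuses:
+#       print("unexpected results: %s" % summary.unexpected_statuses)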
diff --git a/testing/mozbase/mozlog/mozlog/handlers/valgrindhandler.py b/testing/mozbase/mozlog/mozlog/handlers/valgrindhandler.py
new file mode 100644
index 000000000..5bedfb9ab
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/handlers/valgrindhandler.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .base import BaseHandler
+import re
+
+
+class ValgrindHandler(BaseHandler):
+
+ def __init__(self, inner):
+ BaseHandler.__init__(self, inner)
+ self.inner = inner
+ self.vFilter = ValgrindFilter()
+
+ def __call__(self, data):
+ tmp = self.vFilter(data)
+ if tmp is not None:
+ self.inner(tmp)
+
+
+class ValgrindFilter(object):
+ '''
+ A class for handling Valgrind output.
+
+ Valgrind errors look like this:
+
+ ==60741== 40 (24 direct, 16 indirect) bytes in 1 blocks are definitely lost in loss
+ record 2,746 of 5,235
+ ==60741== at 0x4C26B43: calloc (vg_replace_malloc.c:593)
+ ==60741== by 0x63AEF65: PR_Calloc (prmem.c:443)
+ ==60741== by 0x69F236E: PORT_ZAlloc_Util (secport.c:117)
+ ==60741== by 0x69F1336: SECITEM_AllocItem_Util (secitem.c:28)
+ ==60741== by 0xA04280B: ffi_call_unix64 (in /builds/slave/m-in-l64-valgrind-000000000000/objdir/toolkit/library/libxul.so) # noqa
+ ==60741== by 0xA042443: ffi_call (ffi64.c:485)
+
+ For each such error, this class extracts most or all of the first (error
+ kind) line, plus the function name in each of the first few stack entries.
+ With this data it constructs and prints a TEST-UNEXPECTED-FAIL message that
+ TBPL will highlight.
+
+ It buffers these lines from which text is extracted so that the
+ TEST-UNEXPECTED-FAIL message can be printed before the full error.
+
+ Parsing the Valgrind output isn't ideal, and it may break in the future if
+ Valgrind changes the format of the messages, or introduces new error kinds.
+ To protect against this, we also count how many lines containing
+ "<insert_a_suppression_name_here>" are seen. Thanks to the use of
+ --gen-suppressions=yes, exactly one of these lines is present per error. If
+ the count of these lines doesn't match the error count found during
+ parsing, then the parsing has missed one or more errors and we can fail
+ appropriately.
+ '''
+
+ def __init__(self):
+ # The regexps in this list match all of Valgrind's errors. Note that
+ # Valgrind is English-only, so we don't have to worry about
+ # localization.
+ self.re_error = \
+ re.compile(
+ r'==\d+== (' +
+ r'(Use of uninitialised value of size \d+)|' +
+ r'(Conditional jump or move depends on uninitialised value\(s\))|' +
+ r'(Syscall param .* contains uninitialised byte\(s\))|' +
+ r'(Syscall param .* points to (unaddressable|uninitialised) byte\(s\))|' +
+ r'((Unaddressable|Uninitialised) byte\(s\) found during client check request)|' +
+ r'(Invalid free\(\) / delete / delete\[\] / realloc\(\))|' +
+ r'(Mismatched free\(\) / delete / delete \[\])|' +
+ r'(Invalid (read|write) of size \d+)|' +
+ r'(Jump to the invalid address stated on the next line)|' +
+ r'(Source and destination overlap in .*)|' +
+ r'(.* bytes in .* blocks are .* lost)' +
+ r')'
+ )
+        # Match identifier chars, plus ':' for namespaces, and '\?' in order to
+ # match "???" which Valgrind sometimes produces.
+ self.re_stack_entry = \
+ re.compile(r'^==\d+==.*0x[A-Z0-9]+: ([A-Za-z0-9_:\?]+)')
+ self.re_suppression = \
+ re.compile(r' *<insert_a_suppression_name_here>')
+ self.error_count = 0
+ self.suppression_count = 0
+ self.number_of_stack_entries_to_get = 0
+ self.curr_failure_msg = ""
+ self.buffered_lines = []
+
+ # Takes a message and returns a message
+ def __call__(self, msg):
+ # Pass through everything that isn't plain text
+ if msg['action'] != 'log':
+ return msg
+
+ line = msg['message']
+ output_message = None
+ if self.number_of_stack_entries_to_get == 0:
+ # Look for the start of a Valgrind error.
+ m = re.search(self.re_error, line)
+ if m:
+ self.error_count += 1
+ self.number_of_stack_entries_to_get = 4
+ self.curr_failure_msg = m.group(1) + " at "
+ self.buffered_lines = [line]
+ else:
+ output_message = msg
+
+ else:
+ # We've recently found a Valgrind error, and are now extracting
+ # details from the first few stack entries.
+ self.buffered_lines.append(line)
+ m = re.match(self.re_stack_entry, line)
+ if m:
+ self.curr_failure_msg += m.group(1)
+ else:
+ self.curr_failure_msg += '?!?'
+
+ self.number_of_stack_entries_to_get -= 1
+ if self.number_of_stack_entries_to_get != 0:
+ self.curr_failure_msg += ' / '
+ else:
+ # We've finished getting the first few stack entries. Emit
+ # the failure action, comprising the primary message and the
+ # buffered lines, and then reset state. Copy the mandatory
+ # fields from the incoming message, since there's nowhere
+ # else to get them from.
+ output_message = { # Mandatory fields
+ u"action": "valgrind_error",
+ u"time": msg["time"],
+ u"thread": msg["thread"],
+ u"pid": msg["pid"],
+ u"source": msg["source"],
+ # valgrind_error specific fields
+ u"primary": self.curr_failure_msg,
+ u"secondary": self.buffered_lines}
+ self.curr_failure_msg = ""
+ self.buffered_lines = []
+
+ if re.match(self.re_suppression, line):
+ self.suppression_count += 1
+
+ return output_message
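+
+# Example wiring (a minimal sketch): ValgrindHandler sits in front of an output
+# handler, turning buffered Valgrind error lines into valgrind_error actions and
+# passing every other message through unchanged.
+#
+#   import sys
+#   from mozlog.formatters.tbplformatter import TbplFormatter
+#   from mozlog.handlers import StreamHandler, ValgrindHandler
+#
+#   logger.add_handler(ValgrindHandler(StreamHandler(sys.stdout, TbplFormatter())))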
diff --git a/testing/mozbase/mozlog/mozlog/logtypes.py b/testing/mozbase/mozlog/mozlog/logtypes.py
new file mode 100644
index 000000000..d5ebff93c
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/logtypes.py
@@ -0,0 +1,204 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+convertor_registry = {}
+missing = object()
+no_default = object()
+
+
+class log_action(object):
+
+ def __init__(self, *args):
+ self.args = {}
+
+ self.args_no_default = []
+ self.args_with_default = []
+
+ # These are the required fields in a log message that usually aren't
+ # supplied by the caller, but can be in the case of log_raw
+ self.default_args = [
+ Unicode("action"),
+ Int("time"),
+ Unicode("thread"),
+ Int("pid", default=None),
+ Unicode("source"),
+ Unicode("component")]
+
+ for arg in args:
+ if arg.default is no_default:
+ self.args_no_default.append(arg.name)
+ else:
+ self.args_with_default.append(arg.name)
+
+ if arg.name in self.args:
+ raise ValueError("Repeated argument name %s" % arg.name)
+
+ self.args[arg.name] = arg
+
+ for extra in self.default_args:
+ self.args[extra.name] = extra
+
+ def __call__(self, f):
+ convertor_registry[f.__name__] = self
+ converter = self
+
+ def inner(self, *args, **kwargs):
+ data = converter.convert(*args, **kwargs)
+ return f(self, data)
+
+ if hasattr(f, '__doc__'):
+ setattr(inner, '__doc__', f.__doc__)
+
+ return inner
+
+ def convert(self, *args, **kwargs):
+ data = {}
+ values = {}
+ values.update(kwargs)
+
+ positional_no_default = [item for item in self.args_no_default if item not in values]
+
+ num_no_default = len(positional_no_default)
+
+ if len(args) < num_no_default:
+ raise TypeError("Too few arguments")
+
+ if len(args) > num_no_default + len(self.args_with_default):
+ raise TypeError("Too many arguments")
+
+ for i, name in enumerate(positional_no_default):
+ values[name] = args[i]
+
+ positional_with_default = [self.args_with_default[i]
+ for i in range(len(args) - num_no_default)]
+
+ for i, name in enumerate(positional_with_default):
+ if name in values:
+ raise TypeError("Argument %s specified twice" % name)
+ values[name] = args[i + num_no_default]
+
+ # Fill in missing arguments
+ for name in self.args_with_default:
+ if name not in values:
+ values[name] = self.args[name].default
+
+ for key, value in values.iteritems():
+ if key in self.args:
+ out_value = self.args[key](value)
+ if out_value is not missing:
+ data[key] = out_value
+ else:
+ raise TypeError("Unrecognised argument %s" % key)
+
+ return data
+
+ def convert_known(self, **kwargs):
+ known_kwargs = {name: value for name, value in kwargs.iteritems()
+ if name in self.args}
+ return self.convert(**known_kwargs)
+
+
+class DataType(object):
+
+ def __init__(self, name, default=no_default, optional=False):
+ self.name = name
+ self.default = default
+
+ if default is no_default and optional is not False:
+ raise ValueError("optional arguments require a default value")
+
+ self.optional = optional
+
+ def __call__(self, value):
+ if value == self.default:
+ if self.optional:
+ return missing
+ return self.default
+
+ try:
+ return self.convert(value)
+ except:
+ raise ValueError("Failed to convert value %s of type %s for field %s to type %s" %
+ (value, type(value).__name__, self.name, self.__class__.__name__))
+
+
+class Unicode(DataType):
+
+ def convert(self, data):
+ if isinstance(data, unicode):
+ return data
+ if isinstance(data, str):
+ return data.decode("utf8", "replace")
+ return unicode(data)
+
+
+class TestId(DataType):
+
+ def convert(self, data):
+ if isinstance(data, unicode):
+ return data
+ elif isinstance(data, bytes):
+ return data.decode("utf-8", "replace")
+ elif isinstance(data, (tuple, list)):
+ # This is really a bit of a hack; should really split out convertors from the
+ # fields they operate on
+ func = Unicode(None).convert
+ return tuple(func(item) for item in data)
+ else:
+ raise ValueError
+
+
+class Status(DataType):
+ allowed = ["PASS", "FAIL", "OK", "ERROR", "TIMEOUT", "CRASH", "ASSERT", "SKIP"]
+
+ def convert(self, data):
+ value = data.upper()
+ if value not in self.allowed:
+ raise ValueError
+ return value
+
+
+class SubStatus(Status):
+ allowed = ["PASS", "FAIL", "ERROR", "TIMEOUT", "ASSERT", "NOTRUN", "SKIP"]
+
+
+class Dict(DataType):
+
+ def convert(self, data):
+ return dict(data)
+
+
+class List(DataType):
+
+ def __init__(self, name, item_type, default=no_default, optional=False):
+ DataType.__init__(self, name, default, optional)
+ self.item_type = item_type(None)
+
+ def convert(self, data):
+ return [self.item_type.convert(item) for item in data]
+
+
+class Int(DataType):
+
+ def convert(self, data):
+ return int(data)
+
+
+class Any(DataType):
+
+ def convert(self, data):
+ return data
+
+
+class Tuple(DataType):
+
+ def __init__(self, name, item_types, default=no_default, optional=False):
+ DataType.__init__(self, name, default, optional)
+ self.item_types = item_types
+
+ def convert(self, data):
+ if len(data) != len(self.item_types):
+ raise ValueError("Expected %i items got %i" % (len(self.item_types), len(data)))
+ return tuple(item_type.convert(value)
+ for item_type, value in zip(self.item_types, data))
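+
+# Example of how these types are used (a minimal sketch mirroring the pattern in
+# structuredlog.py; "my_action" and its fields are made up for illustration):
+#
+#   class MyLogger(object):
+#       @log_action(Unicode("name"),
+#                   Int("count", default=0),
+#                   Dict("extra", default=None, optional=True))
+#       def my_action(self, data):
+#           # "data" arrives as a dict of converted, validated fields
+#           return data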
diff --git a/testing/mozbase/mozlog/mozlog/proxy.py b/testing/mozbase/mozlog/mozlog/proxy.py
new file mode 100644
index 000000000..44ce24225
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/proxy.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .structuredlog import get_default_logger
+
+
+class ProxyLogger(object):
+ """
+ A ProxyLogger behaves like a
+ :class:`mozlog.structuredlog.StructuredLogger`.
+
+ Each method and attribute access will be forwarded to the underlying
+ StructuredLogger.
+
+ RuntimeError will be raised when the default logger is not yet initialized.
+ """
+
+ def __init__(self, component=None):
+ self.logger = None
+ self._component = component
+
+ def __getattr__(self, name):
+ if self.logger is None:
+ self.logger = get_default_logger(component=self._component)
+ if self.logger is None:
+ raise RuntimeError("Default logger is not initialized!")
+ return getattr(self.logger, name)
+
+
+def get_proxy_logger(component=None):
+ """
+ Returns a :class:`ProxyLogger` for the given component.
+ """
+ return ProxyLogger(component)
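+
+# Example use (a minimal sketch; assumes mozlog.commandline.setup_logging or
+# set_default_logger has already installed a default logger, otherwise any
+# attribute access on the proxy raises RuntimeError):
+#
+#   from mozlog.proxy import get_proxy_logger
+#
+#   log = get_proxy_logger(component="my-component")
+#   log.info("forwarded to the default logger")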
diff --git a/testing/mozbase/mozlog/mozlog/pytest_mozlog/__init__.py b/testing/mozbase/mozlog/mozlog/pytest_mozlog/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/pytest_mozlog/__init__.py
diff --git a/testing/mozbase/mozlog/mozlog/pytest_mozlog/plugin.py b/testing/mozbase/mozlog/mozlog/pytest_mozlog/plugin.py
new file mode 100644
index 000000000..811ee38e3
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/pytest_mozlog/plugin.py
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozlog
+import os
+import time
+
+
+def pytest_addoption(parser):
+ # We can't simply use mozlog.commandline.add_logging_group(parser) here because
+ # Pytest's parser doesn't have the add_argument_group method Mozlog expects.
+ group = parser.getgroup('mozlog')
+
+ for name, (_class, _help) in mozlog.commandline.log_formatters.iteritems():
+ group.addoption('--log-{0}'.format(name), action='append', help=_help)
+
+ formatter_options = mozlog.commandline.fmt_options.iteritems()
+ for name, (_class, _help, formatters, action) in formatter_options:
+ for formatter in formatters:
+ if formatter in mozlog.commandline.log_formatters:
+ group.addoption(
+ '--log-{0}-{1}'.format(formatter, name),
+ action=action,
+ help=_help)
+
+
+def pytest_configure(config):
+ # If using pytest-xdist for parallelization, only register plugin on master process
+ if not hasattr(config, 'slaveinput'):
+ config.pluginmanager.register(MozLog())
+
+
+class MozLog(object):
+
+ def __init__(self):
+ self.results = {}
+ self.start_time = int(time.time() * 1000) # in ms for Mozlog compatibility
+
+ def format_nodeid(self, nodeid):
+        '''Helper to reformat/shorten a "::"-separated pytest test nodeid'''
+ testfile, testname = nodeid.split("::")
+ return " ".join([os.path.basename(testfile), testname])
+
+ def pytest_configure(self, config):
+ mozlog.commandline.setup_logging('pytest', config.known_args_namespace,
+ defaults={}, allow_unused_options=True)
+ self.logger = mozlog.get_default_logger(component='pytest')
+
+ def pytest_sessionstart(self, session):
+ '''Called before test collection; records suite start time to log later'''
+ self.start_time = int(time.time() * 1000) # in ms for Mozlog compatibility
+
+ def pytest_collection_modifyitems(self, items):
+ '''Called after test collection is completed, just before tests are run (suite start)'''
+ self.logger.suite_start(tests=items, time=self.start_time)
+
+ def pytest_sessionfinish(self, session, exitstatus):
+ self.logger.suite_end()
+
+ def pytest_runtest_logstart(self, nodeid, location):
+ self.logger.test_start(test=self.format_nodeid(nodeid))
+
+ def pytest_runtest_logreport(self, report):
+ '''Called 3 times per test (setup, call, teardown), indicated by report.when'''
+ test = report.nodeid
+ status = expected = 'PASS'
+ message = stack = None
+ if hasattr(report, 'wasxfail'):
+            # Pytest reporting for xfail tests is somewhat counterintuitive:
+ # If an xfail test fails as expected, its 'call' report has .skipped,
+ # so we record status FAIL (== expected) and log an expected result.
+ # If an xfail unexpectedly passes, the 'call' report has .failed (Pytest 2)
+ # or .passed (Pytest 3), so we leave status as PASS (!= expected)
+ # to log an unexpected result.
+ expected = 'FAIL'
+ if report.skipped: # indicates expected failure (passing test)
+ status = 'FAIL'
+ elif report.failed:
+ status = 'FAIL' if report.when == 'call' else 'ERROR'
+ crash = report.longrepr.reprcrash # here longrepr is a ReprExceptionInfo
+ message = "{0} (line {1})".format(crash.message, crash.lineno)
+ stack = report.longrepr.reprtraceback
+ elif report.skipped: # indicates true skip
+ status = expected = 'SKIP'
+ message = report.longrepr[-1] # here longrepr is a tuple (file, lineno, reason)
+ if status != expected or expected != 'PASS':
+ self.results[test] = (status, expected, message, stack)
+ if report.when == 'teardown':
+ defaults = ('PASS', 'PASS', None, None)
+ status, expected, message, stack = self.results.get(test, defaults)
+ self.logger.test_end(test=self.format_nodeid(test),
+ status=status, expected=expected,
+ message=message, stack=stack)
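+
+# Example invocation (a sketch; assumes this plugin is registered with pytest,
+# e.g. via an entry point, and that "tbpl" is a registered mozlog formatter name):
+#
+#   pytest --log-tbpl=results.log test_module.py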
diff --git a/testing/mozbase/mozlog/mozlog/reader.py b/testing/mozbase/mozlog/mozlog/reader.py
new file mode 100644
index 000000000..c00ec6d38
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/reader.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+
+
+def read(log_f, raise_on_error=False):
+ """Return a generator that will return the entries in a structured log file.
+ Note that the caller must not close the file whilst the generator is still
+ in use.
+
+ :param log_f: file-like object containing the raw log entries, one per line
+ :param raise_on_error: boolean indicating whether ValueError should be raised
+ for lines that cannot be decoded."""
+ while True:
+ line = log_f.readline()
+ if not line:
+ # This allows log_f to be a stream like stdout
+ break
+ try:
+ yield json.loads(line)
+ except ValueError:
+ if raise_on_error:
+ raise
+
+
+def imap_log(log_iter, action_map):
+ """Create an iterator that will invoke a callback per action for each item in a
+ iterable containing structured log entries
+
+ :param log_iter: Iterator returning structured log entries
+ :param action_map: Dictionary mapping action name to callback function. Log items
+ with actions not in this dictionary will be skipped.
+ """
+ for item in log_iter:
+ if item["action"] in action_map:
+ yield action_map[item["action"]](item)
+
+
+def each_log(log_iter, action_map):
+ """Call a callback for each item in an iterable containing structured
+ log entries
+
+ :param log_iter: Iterator returning structured log entries
+ :param action_map: Dictionary mapping action name to callback function. Log items
+ with actions not in this dictionary will be skipped.
+ """
+ for item in log_iter:
+ if item["action"] in action_map:
+ action_map[item["action"]](item)
+
+
+class LogHandler(object):
+ """Base class for objects that act as log handlers. A handler is a callable
+ that takes a log entry as the only argument.
+
+ Subclasses are expected to provide a method for each action type they
+ wish to handle, each taking a single argument for the test data.
+ For example a trivial subclass that just produces the id of each test as
+ it starts might be::
+
+ class StartIdHandler(LogHandler):
+            def test_start(self, data):
+                # For simplicity in the example pretend the id is always a string
+ return data["test"]
+ """
+
+ def __call__(self, data):
+ if hasattr(self, data["action"]):
+ handler = getattr(self, data["action"])
+ return handler(data)
+
+
+def handle_log(log_iter, handler):
+ """Call a handler for each item in a log, discarding the return value"""
+ for item in log_iter:
+ handler(item)
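+
+# Example use (a minimal sketch; "run.log" is an arbitrary file containing one
+# JSON log entry per line):
+#
+#   counts = {}
+#
+#   def count_status(data):
+#       counts[data["status"]] = counts.get(data["status"], 0) + 1
+#
+#   with open("run.log") as f:
+#       each_log(read(f), {"test_end": count_status})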
diff --git a/testing/mozbase/mozlog/mozlog/scripts/__init__.py b/testing/mozbase/mozlog/mozlog/scripts/__init__.py
new file mode 100644
index 000000000..53f9146c9
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/scripts/__init__.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+import argparse
+import unstable
+import format as formatlog
+import logmerge
+
+
+def get_parser():
+ parser = argparse.ArgumentParser("structlog",
+ description="Tools for dealing with structured logs")
+
+ commands = {"unstable": (unstable.get_parser, unstable.main),
+ "format": (formatlog.get_parser, formatlog.main),
+ "logmerge": (logmerge.get_parser, logmerge.main)}
+
+ sub_parser = parser.add_subparsers(title='Subcommands')
+
+ for command, (parser_func, main_func) in commands.iteritems():
+ parent = parser_func(False)
+ command_parser = sub_parser.add_parser(command,
+ description=parent.description,
+ parents=[parent])
+ command_parser.set_defaults(func=main_func)
+
+ return parser
+
+
+def main():
+ parser = get_parser()
+ args = parser.parse_args()
+ args.func(**vars(args))
diff --git a/testing/mozbase/mozlog/mozlog/scripts/format.py b/testing/mozbase/mozlog/mozlog/scripts/format.py
new file mode 100644
index 000000000..1644e4b95
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/scripts/format.py
@@ -0,0 +1,42 @@
+import argparse
+import sys
+
+from .. import handlers, commandline, reader
+
+
+def get_parser(add_help=True):
+ parser = argparse.ArgumentParser("format",
+ description="Format a structured log stream",
+ add_help=add_help)
+ parser.add_argument("--input", action="store", default=None,
+ help="Filename to read from, defaults to stdin")
+ parser.add_argument("--output", action="store", default=None,
+ help="Filename to write to, defaults to stdout")
+ parser.add_argument("format", choices=commandline.log_formatters.keys(),
+ help="Format to use")
+ return parser
+
+
+def main(**kwargs):
+ if kwargs["input"] is None:
+ input_file = sys.stdin
+ else:
+ input_file = open(kwargs["input"])
+ if kwargs["output"] is None:
+ output_file = sys.stdout
+ else:
+ output_file = open(kwargs["output"], "w")
+
+ formatter = commandline.log_formatters[kwargs["format"]][0]()
+
+ handler = handlers.StreamHandler(stream=output_file,
+ formatter=formatter)
+
+ for data in reader.read(input_file):
+ handler(data)
+
+if __name__ == "__main__":
+ parser = get_parser()
+ args = parser.parse_args()
+ kwargs = vars(args)
+ main(**kwargs)
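+
+# Example invocation (a sketch; assumes the "structlog" entry point defined in
+# mozlog.scripts is installed and that "tbpl" is a registered formatter name):
+#
+#   structlog format --input raw.log --output formatted.log tbpl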
diff --git a/testing/mozbase/mozlog/mozlog/scripts/logmerge.py b/testing/mozbase/mozlog/mozlog/scripts/logmerge.py
new file mode 100644
index 000000000..1185dc605
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/scripts/logmerge.py
@@ -0,0 +1,82 @@
+from __future__ import print_function
+import argparse
+import json
+import os
+import sys
+from threading import current_thread
+import time
+from mozlog.reader import read
+
+
+def dump_entry(entry, output):
+ json.dump(entry, output)
+ output.write("\n")
+
+
+def fill_process_info(event):
+ event["time"] = int(round(time.time() * 1000))
+ event["thread"] = current_thread().name
+ event["pid"] = os.getpid()
+ return event
+
+
+def process_until(reader, output, action):
+ for entry in reader:
+ if entry['action'] == action:
+ return entry
+ dump_entry(entry, output)
+
+
+def process_until_suite_start(reader, output):
+ return process_until(reader, output, "suite_start")
+
+
+def process_until_suite_end(reader, output):
+ return process_until(reader, output, "suite_end")
+
+
+def validate_start_events(events):
+ for start in events:
+        if start['run_info'] != events[0]['run_info']:
+ print("Error: different run_info entries", file=sys.stderr)
+ sys.exit(1)
+
+
+def merge_start_events(events):
+ for start in events[1:]:
+ events[0]["tests"].extend(start["tests"])
+ return events[0]
+
+
+def get_parser(add_help=True):
+ parser = argparse.ArgumentParser(
+ "logmerge", description='Merge multiple log files.', add_help=add_help)
+ parser.add_argument('-o', dest='output', help='output file, defaults to stdout')
+ parser.add_argument('files', metavar='File', type=str, nargs='+', help='file to be merged')
+ return parser
+
+
+def main(**kwargs):
+ if kwargs["output"] is None:
+ output = sys.stdout
+ else:
+ output = open(kwargs["output"], "w")
+ readers = [read(open(filename, 'r')) for filename in kwargs["files"]]
+ start_events = [process_until_suite_start(reader, output) for reader in readers]
+ validate_start_events(start_events)
+ merged_start_event = merge_start_events(start_events)
+ dump_entry(fill_process_info(merged_start_event), output)
+
+ end_events = [process_until_suite_end(reader, output) for reader in readers]
+ dump_entry(fill_process_info(end_events[0]), output)
+
+ for reader in readers:
+ for entry in reader:
+ dump_entry(entry, output)
+
+
+if __name__ == "__main__":
+ parser = get_parser()
+ args = parser.parse_args()
+ kwargs = vars(args)
+ main(**kwargs)
diff --git a/testing/mozbase/mozlog/mozlog/scripts/unstable.py b/testing/mozbase/mozlog/mozlog/scripts/unstable.py
new file mode 100644
index 000000000..2b31bba52
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/scripts/unstable.py
@@ -0,0 +1,120 @@
+import argparse
+from collections import defaultdict
+import json
+
+from mozlog import reader
+
+
+class StatusHandler(reader.LogHandler):
+
+ def __init__(self):
+ self.run_info = None
+ self.statuses = defaultdict(lambda: defaultdict(
+ lambda: defaultdict(lambda: defaultdict(int))))
+
+ def test_id(self, test):
+ if type(test) in (str, unicode):
+ return test
+ else:
+ return tuple(test)
+
+ def suite_start(self, item):
+ self.run_info = tuple(sorted(item.get("run_info", {}).items()))
+
+ def test_status(self, item):
+ self.statuses[self.run_info][self.test_id(item["test"])][item["subtest"]][
+ item["status"]] += 1
+
+ def test_end(self, item):
+ self.statuses[self.run_info][self.test_id(item["test"])][None][item["status"]] += 1
+
+ def suite_end(self, item):
+ self.run_info = None
+
+
+def get_statuses(filenames):
+ handler = StatusHandler()
+
+ for filename in filenames:
+ with open(filename) as f:
+ reader.handle_log(reader.read(f), handler)
+
+ return handler.statuses
+
+
+def _filter(results_cmp):
+ def inner(statuses):
+ rv = defaultdict(lambda: defaultdict(dict))
+
+ for run_info, tests in statuses.iteritems():
+ for test, subtests in tests.iteritems():
+ for name, results in subtests.iteritems():
+ if results_cmp(results):
+ rv[run_info][test][name] = results
+
+ return rv
+ return inner
+
+filter_unstable = _filter(lambda x: len(x) > 1)
+filter_stable = _filter(lambda x: len(x) == 1)
+
+
+def group_results(data):
+ rv = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))
+
+ for run_info, tests in data.iteritems():
+ for test, subtests in tests.iteritems():
+ for name, results in subtests.iteritems():
+ for status, number in results.iteritems():
+ rv[test][name][status] += number
+ return rv
+
+
+def print_results(data):
+ for run_info, tests in data.iteritems():
+ run_str = " ".join("%s:%s" % (k, v) for k, v in run_info) if run_info else "No Run Info"
+ print run_str
+ print "=" * len(run_str)
+ print_run(tests)
+
+
+def print_run(tests):
+ for test, subtests in sorted(tests.items()):
+ print "\n" + str(test)
+ print "-" * len(test)
+ for name, results in subtests.iteritems():
+ print "[%s]: %s" % (name if name is not None else "",
+ " ".join("%s (%i)" % (k, v) for k, v in results.iteritems()))
+
+
+def get_parser(add_help=True):
+ parser = argparse.ArgumentParser("unstable",
+ description="List tests that don't give consistent "
+ "results from one or more runs.", add_help=add_help)
+ parser.add_argument("--json", action="store_true", default=False,
+ help="Output in JSON format")
+ parser.add_argument("--group", action="store_true", default=False,
+ help="Group results from different run types")
+ parser.add_argument("log_file", nargs="+",
+ help="Log files to read")
+ return parser
+
+
+def main(**kwargs):
+ unstable = filter_unstable(get_statuses(kwargs["log_file"]))
+ if kwargs["group"]:
+ unstable = group_results(unstable)
+
+ if kwargs["json"]:
+ print json.dumps(unstable)
+ else:
+ if not kwargs["group"]:
+ print_results(unstable)
+ else:
+ print_run(unstable)
+
+if __name__ == "__main__":
+ parser = get_parser()
+ args = parser.parse_args()
+ kwargs = vars(args)
+ main(**kwargs)
diff --git a/testing/mozbase/mozlog/mozlog/stdadapter.py b/testing/mozbase/mozlog/mozlog/stdadapter.py
new file mode 100644
index 000000000..fc967c0a6
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/stdadapter.py
@@ -0,0 +1,45 @@
+import logging
+
+from structuredlog import StructuredLogger, log_levels
+
+
+class UnstructuredHandler(logging.Handler):
+
+ def __init__(self, name=None, level=logging.NOTSET):
+ self.structured = StructuredLogger(name)
+ logging.Handler.__init__(self, level=level)
+
+ def emit(self, record):
+ if record.levelname in log_levels:
+ log_func = getattr(self.structured, record.levelname.lower())
+ else:
+            log_func = self.structured.debug
+ log_func(record.msg)
+
+ def handle(self, record):
+ self.emit(record)
+
+
+class LoggingWrapper(object):
+
+ def __init__(self, wrapped):
+ self.wrapped = wrapped
+ self.wrapped.addHandler(UnstructuredHandler(self.wrapped.name,
+ logging.getLevelName(self.wrapped.level)))
+
+ def add_handler(self, handler):
+ self.addHandler(handler)
+
+ def remove_handler(self, handler):
+ self.removeHandler(handler)
+
+ def __getattr__(self, name):
+ return getattr(self.wrapped, name)
+
+
+def std_logging_adapter(logger):
+ """Adapter for stdlib logging so that it produces structured
+ messages rather than standard logging messages
+
+ :param logger: logging.Logger to wrap"""
+ return LoggingWrapper(logger)
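+
+# Example use (a minimal sketch; "myapp" is an arbitrary stdlib logger name):
+# wrapping the logger routes its records through a StructuredLogger of the same
+# name, so they are re-emitted as structured "log" actions.
+#
+#   import logging
+#
+#   std = std_logging_adapter(logging.getLogger("myapp"))
+#   std.info("now emitted as a structured log message")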
diff --git a/testing/mozbase/mozlog/mozlog/structuredlog.py b/testing/mozbase/mozlog/mozlog/structuredlog.py
new file mode 100644
index 000000000..fb41f2112
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/structuredlog.py
@@ -0,0 +1,521 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+from multiprocessing import current_process
+from threading import current_thread, Lock
+import json
+import sys
+import time
+import traceback
+
+from logtypes import Unicode, TestId, Status, SubStatus, Dict, List, Int, Any, Tuple
+from logtypes import log_action, convertor_registry
+
+"""Structured Logging for recording test results.
+
+Allowed actions, and subfields:
+ suite_start
+ tests - List of test names
+
+ suite_end
+
+ test_start
+ test - ID for the test
+ path - Relative path to test (optional)
+
+ test_end
+ test - ID for the test
+ status [PASS | FAIL | OK | ERROR |
+ TIMEOUT | CRASH | ASSERT | SKIP] - test status
+ expected [As for status] - Status that the test was expected to get,
+ or absent if the test got the expected status
+ extra - Dictionary of harness-specific extra information e.g. debug info
+
+ test_status
+ test - ID for the test
+ subtest - Name of the subtest
+ status [PASS | FAIL | TIMEOUT | NOTRUN | SKIP] - test status
+ expected [As for status] - Status that the subtest was expected to get,
+ or absent if the subtest got the expected status
+
+ process_output
+ process - PID of the process
+ command - Command line of the process
+ data - Output data from the process
+
+ log
+ level [CRITICAL | ERROR | WARNING |
+ INFO | DEBUG] - level of the logging message
+ message - Message to log
+
+Subfields for all messages:
+ action - the action type of the current message
+ time - the timestamp in ms since the epoch of the log message
+ thread - name for the thread emitting the message
+ pid - id of the python process in which the logger is running
+ source - name for the source emitting the message
+ component - name of the subcomponent emitting the message
+"""
+
+_default_logger_name = None
+
+
+def get_default_logger(component=None):
+ """Gets the default logger if available, optionally tagged with component
+ name. Will return None if not yet set
+
+ :param component: The component name to tag log messages with
+ """
+ global _default_logger_name
+
+ if not _default_logger_name:
+ return None
+
+ return StructuredLogger(_default_logger_name, component=component)
+
+
+def set_default_logger(default_logger):
+ """Sets the default logger to logger.
+
+ It can then be retrieved with :py:func:`get_default_logger`
+
+ Note that :py:func:`~mozlog.commandline.setup_logging` will
+ set a default logger for you, so there should be no need to call this
+    function if you're setting up logging that way (recommended).
+
+ :param default_logger: The logger to set to default.
+ """
+ global _default_logger_name
+
+ _default_logger_name = default_logger.name
+
+log_levels = dict((k.upper(), v) for v, k in
+ enumerate(["critical", "error", "warning", "info", "debug"]))
+
+lint_levels = ["ERROR", "WARNING"]
+
+
+def log_actions():
+ """Returns the set of actions implemented by mozlog."""
+ return set(convertor_registry.keys())
+
+
+class LoggerState(object):
+
+ def __init__(self):
+ self.handlers = []
+ self.running_tests = set()
+ self.suite_started = False
+ self.component_states = {}
+
+
+class ComponentState(object):
+
+ def __init__(self):
+ self.filter_ = None
+
+
+class StructuredLogger(object):
+ _lock = Lock()
+ _logger_states = {}
+ """Create a structured logger with the given name
+
+ :param name: The name of the logger.
+ :param component: A subcomponent that the logger belongs to (typically a library name)
+ """
+
+ def __init__(self, name, component=None):
+ self.name = name
+ self.component = component
+
+ with self._lock:
+ if name not in self._logger_states:
+ self._logger_states[name] = LoggerState()
+
+ if component not in self._logger_states[name].component_states:
+ self._logger_states[name].component_states[component] = ComponentState()
+
+ self._state = self._logger_states[name]
+ self._component_state = self._state.component_states[component]
+
+ def add_handler(self, handler):
+ """Add a handler to the current logger"""
+ self._state.handlers.append(handler)
+
+ def remove_handler(self, handler):
+ """Remove a handler from the current logger"""
+ self._state.handlers.remove(handler)
+
+ def send_message(self, topic, command, *args):
+ """Send a message to each handler configured for this logger. This
+ part of the api is useful to those users requiring dynamic control
+ of a handler's behavior.
+
+ :param topic: The name used by handlers to subscribe to a message.
+ :param command: The name of the command to issue.
+ :param args: Any arguments known to the target for specialized
+ behavior.
+ """
+ rv = []
+ for handler in self._state.handlers:
+ if hasattr(handler, "handle_message"):
+ rv += handler.handle_message(topic, command, *args)
+ return rv
+
+ @property
+ def handlers(self):
+ """A list of handlers that will be called when a
+ message is logged from this logger"""
+ return self._state.handlers
+
+ @property
+ def component_filter(self):
+ return self._component_state.filter_
+
+ @component_filter.setter
+ def component_filter(self, value):
+ self._component_state.filter_ = value
+
+ def log_raw(self, raw_data):
+ if "action" not in raw_data:
+ raise ValueError
+
+ action = raw_data["action"]
+ converted_data = convertor_registry[action].convert_known(**raw_data)
+ for k, v in raw_data.iteritems():
+ if k not in converted_data:
+ converted_data[k] = v
+
+ data = self._make_log_data(action, converted_data)
+
+ if action in ("test_status", "test_end"):
+ if (data["expected"] == data["status"] or
+ data["status"] == "SKIP" or
+ "expected" not in raw_data):
+ del data["expected"]
+
+ if not self._ensure_suite_state(action, data):
+ return
+
+ self._handle_log(data)
+
+ def _log_data(self, action, data=None):
+ if data is None:
+ data = {}
+
+ log_data = self._make_log_data(action, data)
+ self._handle_log(log_data)
+
+ def _handle_log(self, data):
+ with self._lock:
+ if self.component_filter:
+ data = self.component_filter(data)
+ if data is None:
+ return
+
+ for handler in self.handlers:
+ try:
+ handler(data)
+ except Exception:
+ # Write the exception details directly to stderr because
+ # log() would call this method again which is currently locked.
+ print >> sys.__stderr__, '%s: Failure calling log handler:' % __name__
+ print >> sys.__stderr__, traceback.format_exc()
+
+ def _make_log_data(self, action, data):
+ all_data = {"action": action,
+ "time": int(time.time() * 1000),
+ "thread": current_thread().name,
+ "pid": current_process().pid,
+ "source": self.name}
+ if self.component:
+ all_data['component'] = self.component
+ all_data.update(data)
+ return all_data
+
+ def _ensure_suite_state(self, action, data):
+ if action == 'suite_start':
+ if self._state.suite_started:
+ self.error("Got second suite_start message before suite_end. " +
+ "Logged with data: {}".format(json.dumps(data)))
+ return False
+ self._state.suite_started = True
+ elif action == 'suite_end':
+ if not self._state.suite_started:
+ self.error("Got suite_end message before suite_start. " +
+ "Logged with data: {}".format(json.dumps(data)))
+ return False
+ self._state.suite_started = False
+ return True
+
+ @log_action(List("tests", Unicode),
+ Dict("run_info", default=None, optional=True),
+ Dict("version_info", default=None, optional=True),
+ Dict("device_info", default=None, optional=True),
+ Dict("extra", default=None, optional=True))
+ def suite_start(self, data):
+ """Log a suite_start message
+
+ :param list tests: Test identifiers that will be run in the suite.
+ :param dict run_info: Optional information typically provided by mozinfo.
+ :param dict version_info: Optional target application version information provided
+ by mozversion.
+ :param dict device_info: Optional target device information provided by mozdevice.
+ """
+ if not self._ensure_suite_state('suite_start', data):
+ return
+
+ self._log_data("suite_start", data)
+
+ @log_action(Dict("extra", default=None, optional=True))
+ def suite_end(self, data):
+ """Log a suite_end message"""
+ if not self._ensure_suite_state('suite_end', data):
+ return
+
+ self._log_data("suite_end")
+
+ @log_action(TestId("test"),
+ Unicode("path", default=None, optional=True))
+ def test_start(self, data):
+ """Log a test_start message
+
+ :param test: Identifier of the test that will run.
+ :param path: Path to test relative to some base (typically the root of
+ the source tree).
+ """
+ if not self._state.suite_started:
+ self.error("Got test_start message before suite_start for test %s" %
+ data["test"])
+ return
+ if data["test"] in self._state.running_tests:
+ self.error("test_start for %s logged while in progress." %
+ data["test"])
+ return
+ self._state.running_tests.add(data["test"])
+ self._log_data("test_start", data)
+
+ @log_action(TestId("test"),
+ Unicode("subtest"),
+ SubStatus("status"),
+ SubStatus("expected", default="PASS"),
+ Unicode("message", default=None, optional=True),
+ Unicode("stack", default=None, optional=True),
+ Dict("extra", default=None, optional=True))
+ def test_status(self, data):
+ """
+ Log a test_status message indicating a subtest result. Tests that
+ do not have subtests are not expected to produce test_status messages.
+
+ :param test: Identifier of the test that produced the result.
+ :param subtest: Name of the subtest.
+        :param status: Status string indicating the subtest result.
+ :param expected: Status string indicating the expected subtest result.
+ :param message: String containing a message associated with the result.
+ :param stack: a stack trace encountered during test execution.
+ :param extra: suite-specific data associated with the test result.
+ """
+
+ if (data["expected"] == data["status"] or
+ data["status"] == "SKIP"):
+ del data["expected"]
+
+ if data["test"] not in self._state.running_tests:
+ self.error("test_status for %s logged while not in progress. "
+ "Logged with data: %s" % (data["test"], json.dumps(data)))
+ return
+
+ self._log_data("test_status", data)
+
+ @log_action(TestId("test"),
+ Status("status"),
+ Status("expected", default="OK"),
+ Unicode("message", default=None, optional=True),
+ Unicode("stack", default=None, optional=True),
+ Dict("extra", default=None, optional=True))
+ def test_end(self, data):
+ """
+ Log a test_end message indicating that a test completed. For tests
+ with subtests this indicates whether the overall test completed without
+ errors. For tests without subtests this indicates the test result
+ directly.
+
+ :param test: Identifier of the test that produced the result.
+        :param status: Status string indicating the test result.
+ :param expected: Status string indicating the expected test result.
+ :param message: String containing a message associated with the result.
+ :param stack: a stack trace encountered during test execution.
+ :param extra: suite-specific data associated with the test result.
+ """
+
+ if (data["expected"] == data["status"] or
+ data["status"] == "SKIP"):
+ del data["expected"]
+
+ if data["test"] not in self._state.running_tests:
+ self.error("test_end for %s logged while not in progress. "
+ "Logged with data: %s" % (data["test"], json.dumps(data)))
+ else:
+ self._state.running_tests.remove(data["test"])
+ self._log_data("test_end", data)
+
+ @log_action(Unicode("process"),
+ Unicode("data"),
+ Unicode("command", default=None, optional=True))
+ def process_output(self, data):
+ """Log output from a managed process.
+
+ :param process: A unique identifier for the process producing the output
+ (typically the pid)
+ :param data: The output to log
+ :param command: A string representing the full command line used to start
+ the process.
+ """
+ self._log_data("process_output", data)
+
+ @log_action(Unicode("process", default=None),
+ Unicode("signature", default="[Unknown]"),
+ TestId("test", default=None, optional=True),
+ Unicode("minidump_path", default=None, optional=True),
+ Unicode("minidump_extra", default=None, optional=True),
+ Int("stackwalk_retcode", default=None, optional=True),
+ Unicode("stackwalk_stdout", default=None, optional=True),
+ Unicode("stackwalk_stderr", default=None, optional=True),
+ List("stackwalk_errors", Unicode, default=None))
+ def crash(self, data):
+ if data["stackwalk_errors"] is None:
+ data["stackwalk_errors"] = []
+
+ self._log_data("crash", data)
+
+ @log_action(Unicode("primary", default=None),
+ List("secondary", Unicode, default=None))
+ def valgrind_error(self, data):
+ self._log_data("valgrind_error", data)
+
+ @log_action(Unicode("process"),
+ Unicode("command", default=None, optional=True))
+ def process_start(self, data):
+ """Log start event of a process.
+
+ :param process: A unique identifier for the process producing the
+ output (typically the pid)
+ :param command: A string representing the full command line used to
+ start the process.
+ """
+ self._log_data("process_start", data)
+
+ @log_action(Unicode("process"),
+ Int("exitcode"),
+ Unicode("command", default=None, optional=True))
+ def process_exit(self, data):
+ """Log exit event of a process.
+
+ :param process: A unique identifier for the process producing the
+ output (typically the pid)
+ :param exitcode: the exit code
+ :param command: A string representing the full command line used to
+ start the process.
+ """
+ self._log_data("process_exit", data)
+
+
+def _log_func(level_name):
+ @log_action(Unicode("message"),
+ Any("exc_info", default=False))
+ def log(self, data):
+ exc_info = data.pop("exc_info", None)
+ if exc_info:
+ if not isinstance(exc_info, tuple):
+ exc_info = sys.exc_info()
+ if exc_info != (None, None, None):
+ bt = traceback.format_exception(*exc_info)
+ data["stack"] = u"\n".join(bt)
+
+ data["level"] = level_name
+ self._log_data("log", data)
+
+ log.__doc__ = """Log a message with level %s
+
+:param message: The string message to log
+:param exc_info: Either a boolean indicating whether to include a traceback
+ derived from sys.exc_info() or a three-item tuple in the
+ same format as sys.exc_info() containing exception information
+ to log.
+""" % level_name
+ log.__name__ = str(level_name).lower()
+ return log
+
+
+def _lint_func(level_name):
+ @log_action(Unicode("path"),
+ Unicode("message", default=""),
+ Int("lineno", default=0),
+ Int("column", default=None, optional=True),
+ Unicode("hint", default=None, optional=True),
+ Unicode("source", default=None, optional=True),
+ Unicode("rule", default=None, optional=True),
+ Tuple("lineoffset", (Int, Int), default=None, optional=True),
+ Unicode("linter", default=None, optional=True))
+ def lint(self, data):
+ data["level"] = level_name
+ self._log_data("lint", data)
+ lint.__doc__ = """Log an error resulting from a failed lint check
+
+ :param linter: name of the linter that flagged this error
+ :param path: path to the file containing the error
+ :param message: text describing the error
+ :param lineno: line number that contains the error
+ :param column: column containing the error
+ :param hint: suggestion for fixing the error (optional)
+ :param source: source code context of the error (optional)
+ :param rule: name of the rule that was violated (optional)
+    :param lineoffset: denotes that an error spans multiple lines, of the form
+ (<lineno offset>, <num lines>) (optional)
+ """
+ lint.__name__ = str("lint_%s" % level_name)
+ return lint
+
+
+# Create all the methods on StructuredLog for log/lint levels
+for level_name in log_levels:
+ setattr(StructuredLogger, level_name.lower(), _log_func(level_name))
+
+for level_name in lint_levels:
+ level_name = level_name.lower()
+ name = "lint_%s" % level_name
+ setattr(StructuredLogger, name, _lint_func(level_name))
+
+
+class StructuredLogFileLike(object):
+ """Wrapper for file-like objects to redirect writes to logger
+ instead. Each call to `write` becomes a single log entry of type `log`.
+
+ When using this it is important that the callees i.e. the logging
+ handlers do not themselves try to write to the wrapped file as this
+ will cause infinite recursion.
+
+ :param logger: `StructuredLogger` to which to redirect the file write operations.
+ :param level: log level to use for each write.
+ :param prefix: String prefix to prepend to each log entry.
+ """
+
+ def __init__(self, logger, level="info", prefix=None):
+ self.logger = logger
+ self.log_func = getattr(self.logger, level)
+ self.prefix = prefix
+
+ def write(self, data):
+ if data.endswith("\n"):
+ data = data[:-1]
+ if data.endswith("\r"):
+ data = data[:-1]
+ if self.prefix is not None:
+ data = "%s: %s" % (self.prefix, data)
+ self.log_func(data)
+
+ def flush(self):
+ pass
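
For orientation, a minimal usage sketch of the StructuredLogger API defined above; it is not part of the patch, the suite/test names are illustrative, and the StreamHandler/TbplFormatter pair is borrowed from the accompanying tests:

    # Sketch only: walk the suite/test lifecycle enforced by _ensure_suite_state.
    import sys

    from mozlog.structuredlog import StructuredLogger, StructuredLogFileLike
    from mozlog.handlers import StreamHandler
    from mozlog.formatters import TbplFormatter

    logger = StructuredLogger("example-suite")
    logger.add_handler(StreamHandler(sys.stdout, TbplFormatter()))

    logger.suite_start(["test_one"])                     # must precede any test_start
    logger.test_start("test_one")
    logger.test_status("test_one", "subtest_a", "PASS")  # expected defaults to PASS
    logger.test_end("test_one", "OK")                    # expected defaults to OK
    logger.suite_end()

    # Each write to the wrapper becomes a single `log` entry at the given level.
    wrapped = StructuredLogFileLike(logger, level="info", prefix="stdout")
    wrapped.write("captured output line\n")
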
diff --git a/testing/mozbase/mozlog/mozlog/unstructured/__init__.py b/testing/mozbase/mozlog/mozlog/unstructured/__init__.py
new file mode 100644
index 000000000..d21322b0e
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/unstructured/__init__.py
@@ -0,0 +1,8 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .logger import *
+from .loglistener import LogMessageServer
+from .loggingmixin import LoggingMixin
diff --git a/testing/mozbase/mozlog/mozlog/unstructured/logger.py b/testing/mozbase/mozlog/mozlog/unstructured/logger.py
new file mode 100644
index 000000000..2f026ae55
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/unstructured/logger.py
@@ -0,0 +1,185 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from logging import getLogger as getSysLogger
+from logging import *
+# Some of the build slave environments don't see the following when doing
+# 'from logging import *'
+# see https://bugzilla.mozilla.org/show_bug.cgi?id=700415#c35
+from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig
+import json
+
+_default_level = INFO
+_LoggerClass = getLoggerClass()
+
+# Define mozlog specific log levels
+START = _default_level + 1
+END = _default_level + 2
+PASS = _default_level + 3
+KNOWN_FAIL = _default_level + 4
+FAIL = _default_level + 5
+CRASH = _default_level + 6
+# Define associated text of log levels
+addLevelName(START, 'TEST-START')
+addLevelName(END, 'TEST-END')
+addLevelName(PASS, 'TEST-PASS')
+addLevelName(KNOWN_FAIL, 'TEST-KNOWN-FAIL')
+addLevelName(FAIL, 'TEST-UNEXPECTED-FAIL')
+addLevelName(CRASH, 'PROCESS-CRASH')
+
+
+class MozLogger(_LoggerClass):
+ """
+    MozLogger class which adds some convenience log levels
+    related to automated testing in Mozilla and the ability to
+    output structured log messages.
+ """
+
+ def testStart(self, message, *args, **kwargs):
+ """Logs a test start message"""
+ self.log(START, message, *args, **kwargs)
+
+ def testEnd(self, message, *args, **kwargs):
+ """Logs a test end message"""
+ self.log(END, message, *args, **kwargs)
+
+ def testPass(self, message, *args, **kwargs):
+ """Logs a test pass message"""
+ self.log(PASS, message, *args, **kwargs)
+
+ def testFail(self, message, *args, **kwargs):
+ """Logs a test fail message"""
+ self.log(FAIL, message, *args, **kwargs)
+
+ def testKnownFail(self, message, *args, **kwargs):
+ """Logs a test known fail message"""
+ self.log(KNOWN_FAIL, message, *args, **kwargs)
+
+ def processCrash(self, message, *args, **kwargs):
+ """Logs a process crash message"""
+ self.log(CRASH, message, *args, **kwargs)
+
+ def log_structured(self, action, params=None):
+ """Logs a structured message object."""
+ if params is None:
+ params = {}
+
+ level = params.get('_level', _default_level)
+ if isinstance(level, int):
+ params['_level'] = getLevelName(level)
+ else:
+ params['_level'] = level
+ level = getLevelName(level.upper())
+
+ # If the logger is fed a level number unknown to the logging
+ # module, getLevelName will return a string. Unfortunately,
+ # the logging module will raise a type error elsewhere if
+ # the level is not an integer.
+ if not isinstance(level, int):
+ level = _default_level
+
+ params['action'] = action
+
+        # The message can be None. This is expected and shouldn't cause
+        # unstructured formatters to fail.
+ message = params.get('_message')
+
+ self.log(level, message, extra={'params': params})
+
+
+class JSONFormatter(Formatter):
+ """Log formatter for emitting structured JSON entries."""
+
+ def format(self, record):
+ # Default values determined by logger metadata
+ output = {
+ '_time': int(round(record.created * 1000, 0)),
+ '_namespace': record.name,
+ '_level': getLevelName(record.levelno),
+ }
+
+ # If this message was created by a call to log_structured,
+ # anything specified by the caller's params should act as
+ # an override.
+ output.update(getattr(record, 'params', {}))
+
+ if record.msg and output.get('_message') is None:
+ # For compatibility with callers using the printf like
+ # API exposed by python logging, call the default formatter.
+ output['_message'] = Formatter.format(self, record)
+
+ return json.dumps(output, indent=output.get('indent'))
+
+
+class MozFormatter(Formatter):
+ """
+    MozFormatter class used to standardize formatting.
+    If a different format is desired, it can be explicitly
+    overridden with the log handler's setFormatter() method.
+ """
+ level_length = 0
+ max_level_length = len('TEST-START')
+
+ def __init__(self, include_timestamp=False):
+ """
+        Formatter.__init__ has fmt and datefmt parameters that won't have
+        any effect on a MozFormatter instance.
+
+ :param include_timestamp: if True, include formatted time at the
+ beginning of the message
+ """
+ self.include_timestamp = include_timestamp
+ Formatter.__init__(self)
+
+ def format(self, record):
+ # Handles padding so record levels align nicely
+ if len(record.levelname) > self.level_length:
+ pad = 0
+ if len(record.levelname) <= self.max_level_length:
+ self.level_length = len(record.levelname)
+ else:
+ pad = self.level_length - len(record.levelname) + 1
+ sep = '|'.rjust(pad)
+ fmt = '%(name)s %(levelname)s ' + sep + ' %(message)s'
+ if self.include_timestamp:
+ fmt = '%(asctime)s ' + fmt
+ # this protected member is used to define the format
+ # used by the base Formatter's method
+ self._fmt = fmt
+ return Formatter.format(self, record)
+
+
+def getLogger(name, handler=None):
+ """
+ Returns the logger with the specified name.
+ If the logger doesn't exist, it is created.
+ If handler is specified, adds it to the logger. Otherwise a default handler
+ that logs to standard output will be used.
+
+ :param name: The name of the logger to retrieve
+ :param handler: A handler to add to the logger. If the logger already exists,
+ and a handler is specified, an exception will be raised. To
+ add a handler to an existing logger, call that logger's
+ addHandler method.
+ """
+ setLoggerClass(MozLogger)
+
+ if name in Logger.manager.loggerDict:
+ if handler:
+ raise ValueError('The handler parameter requires ' +
+ 'that a logger by this name does ' +
+ 'not already exist')
+ return Logger.manager.loggerDict[name]
+
+ logger = getSysLogger(name)
+ logger.setLevel(_default_level)
+
+ if handler is None:
+ handler = StreamHandler()
+ handler.setFormatter(MozFormatter())
+
+ logger.addHandler(handler)
+ logger.propagate = False
+ return logger
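
For orientation, a minimal usage sketch of the unstructured logger defined above (not part of the patch; the logger name and messages are illustrative):

    import mozlog.unstructured as mozlog

    log = mozlog.getLogger('example')   # default handler: StreamHandler + MozFormatter
    log.testStart('test_foo')
    log.testPass('test_foo passed')
    log.testEnd('test_foo finished')

    # Structured output goes through log_structured; '_'-prefixed keys carry
    # logger metadata such as the level and message.
    log.log_structured('example_action',
                       {'_level': mozlog.INFO, '_message': 'structured message'})
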
diff --git a/testing/mozbase/mozlog/mozlog/unstructured/loggingmixin.py b/testing/mozbase/mozlog/mozlog/unstructured/loggingmixin.py
new file mode 100644
index 000000000..41a67cf09
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/unstructured/loggingmixin.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .logger import (
+ Logger,
+ getLogger,
+)
+
+
+class LoggingMixin(object):
+ """Expose a subset of logging functions to an inheriting class."""
+
+ def set_logger(self, logger_instance=None, name=None):
+ """Method for setting the underlying logger instance to be used."""
+
+ if logger_instance and not isinstance(logger_instance, Logger):
+ raise ValueError("logger_instance must be an instance of Logger")
+
+ if name is None:
+ name = ".".join([self.__module__, self.__class__.__name__])
+
+ self._logger = logger_instance or getLogger(name)
+
+ def _log_msg(self, cmd, *args, **kwargs):
+ if not hasattr(self, "_logger"):
+ self._logger = getLogger(".".join([self.__module__,
+ self.__class__.__name__]))
+ getattr(self._logger, cmd)(*args, **kwargs)
+
+ def log(self, *args, **kwargs):
+ self._log_msg("log", *args, **kwargs)
+
+ def info(self, *args, **kwargs):
+ self._log_msg("info", *args, **kwargs)
+
+ def error(self, *args, **kwargs):
+ self._log_msg("error", *args, **kwargs)
+
+ def warn(self, *args, **kwargs):
+ self._log_msg("warn", *args, **kwargs)
+
+ def log_structured(self, *args, **kwargs):
+ self._log_msg("log_structured", *args, **kwargs)
diff --git a/testing/mozbase/mozlog/mozlog/unstructured/loglistener.py b/testing/mozbase/mozlog/mozlog/unstructured/loglistener.py
new file mode 100644
index 000000000..d56730180
--- /dev/null
+++ b/testing/mozbase/mozlog/mozlog/unstructured/loglistener.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import SocketServer
+import socket
+import json
+
+
+class LogMessageServer(SocketServer.TCPServer):
+
+ def __init__(self, server_address, logger, message_callback=None, timeout=3):
+ SocketServer.TCPServer.__init__(self, server_address, LogMessageHandler)
+ self._logger = logger
+ self._message_callback = message_callback
+ self.timeout = timeout
+
+
+class LogMessageHandler(SocketServer.BaseRequestHandler):
+ """Processes output from a connected log source, logging to an
+    existing logger upon receipt of a well-formed log message."""
+
+ def handle(self):
+ """Continually listens for log messages."""
+ self._partial_message = ''
+ self.request.settimeout(self.server.timeout)
+
+ while True:
+ try:
+ data = self.request.recv(1024)
+ if not data:
+ return
+ self.process_message(data)
+ except socket.timeout:
+ return
+
+ def process_message(self, data):
+ """Processes data from a connected log source. Messages are assumed
+ to be newline delimited, and generally well-formed JSON."""
+ for part in data.split('\n'):
+ msg_string = self._partial_message + part
+ try:
+ msg = json.loads(msg_string)
+ self._partial_message = ''
+ self.server._logger.log_structured(msg.get('action', 'UNKNOWN'), msg)
+ if self.server._message_callback:
+ self.server._message_callback()
+
+ except ValueError:
+ self._partial_message = msg_string
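
For orientation, a minimal sketch of feeding newline-delimited JSON to LogMessageServer over a socket, mirroring what test_log_listener below does (not part of the patch; the logger name and message are illustrative, and the code assumes Python 2 to match the SocketServer import above):

    import json
    import socket
    import threading

    import mozlog.unstructured as mozlog

    logger = mozlog.getLogger('socket.example')
    server = mozlog.LogMessageServer(('127.0.0.1', 0), logger, timeout=1)
    listener = threading.Thread(target=server.handle_request)
    listener.start()

    host, port = server.server_address
    client = socket.socket()
    client.connect((host, port))
    # Each newline-terminated chunk is parsed as JSON and re-logged via
    # log_structured; partial chunks are buffered until a full line arrives.
    client.sendall(json.dumps({'action': 'test_message',
                               '_level': 'INFO',
                               '_message': 'hello over the wire'}) + '\n')
    client.close()
    listener.join()
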
diff --git a/testing/mozbase/mozlog/setup.py b/testing/mozbase/mozlog/setup.py
new file mode 100644
index 000000000..659bb206e
--- /dev/null
+++ b/testing/mozbase/mozlog/setup.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup, find_packages
+
+PACKAGE_NAME = 'mozlog'
+PACKAGE_VERSION = '3.4'
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Robust log handling specialized for logging in the Mozilla universe",
+ long_description="see http://mozbase.readthedocs.org/",
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL 1.1/GPL 2.0/LGPL 2.1',
+ packages=find_packages(),
+ zip_safe=False,
+ install_requires=["blessings>=1.3"],
+ tests_require=['mozfile'],
+ platforms=['Any'],
+ classifiers=['Development Status :: 4 - Beta',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)',
+ 'Operating System :: OS Independent',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
+ package_data={"mozlog": ["formatters/html/main.js",
+ "formatters/html/style.css"]},
+ entry_points={
+ "console_scripts": [
+ "structlog = mozlog.scripts:main"
+ ],
+ 'pytest11': [
+ 'mozlog = mozlog.pytest_mozlog.plugin',
+ ]}
+ )
diff --git a/testing/mozbase/mozlog/tests/manifest.ini b/testing/mozbase/mozlog/tests/manifest.ini
new file mode 100644
index 000000000..62331ee30
--- /dev/null
+++ b/testing/mozbase/mozlog/tests/manifest.ini
@@ -0,0 +1,2 @@
+[test_logger.py]
+[test_structured.py]
diff --git a/testing/mozbase/mozlog/tests/test_logger.py b/testing/mozbase/mozlog/tests/test_logger.py
new file mode 100644
index 000000000..30f4eb569
--- /dev/null
+++ b/testing/mozbase/mozlog/tests/test_logger.py
@@ -0,0 +1,264 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import datetime
+import json
+import socket
+import threading
+import time
+import unittest
+
+import mozfile
+
+import mozlog.unstructured as mozlog
+
+
+class ListHandler(mozlog.Handler):
+ """Mock handler appends messages to a list for later inspection."""
+
+ def __init__(self):
+ mozlog.Handler.__init__(self)
+ self.messages = []
+
+ def emit(self, record):
+ self.messages.append(self.format(record))
+
+
+class TestLogging(unittest.TestCase):
+ """Tests behavior of basic mozlog api."""
+
+ def test_logger_defaults(self):
+ """Tests the default logging format and behavior."""
+
+ default_logger = mozlog.getLogger('default.logger')
+ self.assertEqual(default_logger.name, 'default.logger')
+ self.assertEqual(len(default_logger.handlers), 1)
+ self.assertTrue(isinstance(default_logger.handlers[0],
+ mozlog.StreamHandler))
+
+ f = mozfile.NamedTemporaryFile()
+ list_logger = mozlog.getLogger('file.logger',
+ handler=mozlog.FileHandler(f.name))
+ self.assertEqual(len(list_logger.handlers), 1)
+ self.assertTrue(isinstance(list_logger.handlers[0],
+ mozlog.FileHandler))
+ f.close()
+
+ self.assertRaises(ValueError, mozlog.getLogger,
+ 'file.logger', handler=ListHandler())
+
+ def test_timestamps(self):
+ """Verifies that timestamps are included when asked for."""
+ log_name = 'test'
+ handler = ListHandler()
+ handler.setFormatter(mozlog.MozFormatter())
+ log = mozlog.getLogger(log_name, handler=handler)
+ log.info('no timestamp')
+ self.assertTrue(handler.messages[-1].startswith('%s ' % log_name))
+ handler.setFormatter(mozlog.MozFormatter(include_timestamp=True))
+ log.info('timestamp')
+ # Just verify that this raises no exceptions.
+ datetime.datetime.strptime(handler.messages[-1][:23],
+ '%Y-%m-%d %H:%M:%S,%f')
+
+
+class TestStructuredLogging(unittest.TestCase):
+ """Tests structured output in mozlog."""
+
+ def setUp(self):
+ self.handler = ListHandler()
+ self.handler.setFormatter(mozlog.JSONFormatter())
+ self.logger = mozlog.MozLogger('test.Logger')
+ self.logger.addHandler(self.handler)
+ self.logger.setLevel(mozlog.DEBUG)
+
+ def check_messages(self, expected, actual):
+ """Checks actual for equality with corresponding fields in actual.
+ The actual message should contain all fields in expected, and
+ should be identical, with the exception of the timestamp field.
+ The actual message should contain no fields other than the timestamp
+ field and those present in expected."""
+
+ self.assertTrue(isinstance(actual['_time'], (int, long)))
+
+ for k, v in expected.items():
+ self.assertEqual(v, actual[k])
+
+ for k in actual.keys():
+ if k != '_time':
+ self.assertTrue(expected.get(k) is not None)
+
+ def test_structured_output(self):
+ self.logger.log_structured('test_message',
+ {'_level': mozlog.INFO,
+ '_message': 'message one'})
+ self.logger.log_structured('test_message',
+ {'_level': mozlog.INFO,
+ '_message': 'message two'})
+ self.logger.log_structured('error_message',
+ {'_level': mozlog.ERROR,
+ 'diagnostic': 'unexpected error'})
+
+ message_one_expected = {'_namespace': 'test.Logger',
+ '_level': 'INFO',
+ '_message': 'message one',
+ 'action': 'test_message'}
+ message_two_expected = {'_namespace': 'test.Logger',
+ '_level': 'INFO',
+ '_message': 'message two',
+ 'action': 'test_message'}
+ message_three_expected = {'_namespace': 'test.Logger',
+ '_level': 'ERROR',
+ 'diagnostic': 'unexpected error',
+ 'action': 'error_message'}
+
+ message_one_actual = json.loads(self.handler.messages[0])
+ message_two_actual = json.loads(self.handler.messages[1])
+ message_three_actual = json.loads(self.handler.messages[2])
+
+ self.check_messages(message_one_expected, message_one_actual)
+ self.check_messages(message_two_expected, message_two_actual)
+ self.check_messages(message_three_expected, message_three_actual)
+
+ def test_unstructured_conversion(self):
+ """ Tests that logging to a logger with a structured formatter
+ via the traditional logging interface works as expected. """
+ self.logger.info('%s %s %d', 'Message', 'number', 1)
+ self.logger.error('Message number 2')
+ self.logger.debug('Message with %s', 'some extras',
+ extra={'params': {'action': 'mozlog_test_output',
+ 'is_failure': False}})
+ message_one_expected = {'_namespace': 'test.Logger',
+ '_level': 'INFO',
+ '_message': 'Message number 1'}
+ message_two_expected = {'_namespace': 'test.Logger',
+ '_level': 'ERROR',
+ '_message': 'Message number 2'}
+ message_three_expected = {'_namespace': 'test.Logger',
+ '_level': 'DEBUG',
+ '_message': 'Message with some extras',
+ 'action': 'mozlog_test_output',
+ 'is_failure': False}
+
+ message_one_actual = json.loads(self.handler.messages[0])
+ message_two_actual = json.loads(self.handler.messages[1])
+ message_three_actual = json.loads(self.handler.messages[2])
+
+ self.check_messages(message_one_expected, message_one_actual)
+ self.check_messages(message_two_expected, message_two_actual)
+ self.check_messages(message_three_expected, message_three_actual)
+
+ def message_callback(self):
+ if len(self.handler.messages) == 3:
+ message_one_expected = {'_namespace': 'test.Logger',
+ '_level': 'DEBUG',
+ '_message': 'socket message one',
+ 'action': 'test_message'}
+ message_two_expected = {'_namespace': 'test.Logger',
+ '_level': 'DEBUG',
+ '_message': 'socket message two',
+ 'action': 'test_message'}
+ message_three_expected = {'_namespace': 'test.Logger',
+ '_level': 'DEBUG',
+ '_message': 'socket message three',
+ 'action': 'test_message'}
+
+ message_one_actual = json.loads(self.handler.messages[0])
+
+ message_two_actual = json.loads(self.handler.messages[1])
+
+ message_three_actual = json.loads(self.handler.messages[2])
+
+ self.check_messages(message_one_expected, message_one_actual)
+ self.check_messages(message_two_expected, message_two_actual)
+ self.check_messages(message_three_expected, message_three_actual)
+
+ def test_log_listener(self):
+ connection = '127.0.0.1', 0
+ self.log_server = mozlog.LogMessageServer(connection,
+ self.logger,
+ message_callback=self.message_callback,
+ timeout=0.5)
+
+ message_string_one = json.dumps({'_message': 'socket message one',
+ 'action': 'test_message',
+ '_level': 'DEBUG'})
+ message_string_two = json.dumps({'_message': 'socket message two',
+ 'action': 'test_message',
+ '_level': 'DEBUG'})
+
+ message_string_three = json.dumps({'_message': 'socket message three',
+ 'action': 'test_message',
+ '_level': 'DEBUG'})
+
+ message_string = message_string_one + '\n' + \
+ message_string_two + '\n' + \
+ message_string_three + '\n'
+
+ server_thread = threading.Thread(target=self.log_server.handle_request)
+ server_thread.start()
+
+ host, port = self.log_server.server_address
+
+ sock = socket.socket()
+ sock.connect((host, port))
+
+ # Sleeps prevent listener from receiving entire message in a single call
+ # to recv in order to test reconstruction of partial messages.
+ sock.sendall(message_string[:8])
+ time.sleep(.01)
+ sock.sendall(message_string[8:32])
+ time.sleep(.01)
+ sock.sendall(message_string[32:64])
+ time.sleep(.01)
+ sock.sendall(message_string[64:128])
+ time.sleep(.01)
+ sock.sendall(message_string[128:])
+
+ server_thread.join()
+
+
+class Loggable(mozlog.LoggingMixin):
+ """Trivial class inheriting from LoggingMixin"""
+ pass
+
+
+class TestLoggingMixin(unittest.TestCase):
+ """Tests basic use of LoggingMixin"""
+
+ def test_mixin(self):
+ loggable = Loggable()
+ self.assertTrue(not hasattr(loggable, "_logger"))
+ loggable.log(mozlog.INFO, "This will instantiate the logger")
+ self.assertTrue(hasattr(loggable, "_logger"))
+ self.assertEqual(loggable._logger.name, "test_logger.Loggable")
+
+ self.assertRaises(ValueError, loggable.set_logger,
+ "not a logger")
+
+ logger = mozlog.MozLogger('test.mixin')
+ handler = ListHandler()
+ logger.addHandler(handler)
+ loggable.set_logger(logger)
+ self.assertTrue(isinstance(loggable._logger.handlers[0],
+ ListHandler))
+ self.assertEqual(loggable._logger.name, "test.mixin")
+
+ loggable.log(mozlog.WARN, 'message for "log" method')
+ loggable.info('message for "info" method')
+ loggable.error('message for "error" method')
+ loggable.log_structured('test_message',
+ params={'_message': 'message for ' +
+ '"log_structured" method'})
+
+ expected_messages = ['message for "log" method',
+ 'message for "info" method',
+ 'message for "error" method',
+ 'message for "log_structured" method']
+
+ actual_messages = loggable._logger.handlers[0].messages
+ self.assertEqual(expected_messages, actual_messages)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozlog/tests/test_structured.py b/testing/mozbase/mozlog/tests/test_structured.py
new file mode 100644
index 000000000..ab4b5f582
--- /dev/null
+++ b/testing/mozbase/mozlog/tests/test_structured.py
@@ -0,0 +1,1098 @@
+# -*- coding: utf-8 -*-
+import argparse
+import json
+import optparse
+import os
+import StringIO
+import sys
+import unittest
+import signal
+import xml.etree.ElementTree as ET
+
+import mozfile
+
+from mozlog import (
+ commandline,
+ reader,
+ structuredlog,
+ stdadapter,
+ handlers,
+ formatters,
+)
+
+
+class TestHandler(object):
+
+ def __init__(self):
+ self.items = []
+
+ def __call__(self, data):
+ self.items.append(data)
+
+ @property
+ def last_item(self):
+ return self.items[-1]
+
+ @property
+ def empty(self):
+ return not self.items
+
+
+class BaseStructuredTest(unittest.TestCase):
+
+ def setUp(self):
+ self.logger = structuredlog.StructuredLogger("test")
+ self.handler = TestHandler()
+ self.logger.add_handler(self.handler)
+
+ def pop_last_item(self):
+ return self.handler.items.pop()
+
+ def assert_log_equals(self, expected, actual=None):
+ if actual is None:
+ actual = self.pop_last_item()
+
+ all_expected = {"pid": os.getpid(),
+ "thread": "MainThread",
+ "source": "test"}
+ specials = set(["time"])
+
+ all_expected.update(expected)
+ for key, value in all_expected.iteritems():
+ self.assertEqual(actual[key], value)
+
+ self.assertEquals(set(all_expected.keys()) |
+ specials, set(actual.keys()))
+
+
+class TestStatusHandler(BaseStructuredTest):
+
+ def setUp(self):
+ super(TestStatusHandler, self).setUp()
+ self.handler = handlers.StatusHandler()
+ self.logger.add_handler(self.handler)
+
+ def test_failure_run(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "sub1", status='PASS')
+ self.logger.test_status("test1", "sub2", status='TIMEOUT')
+ self.logger.test_end("test1", status='OK')
+ self.logger.suite_end()
+ summary = self.handler.summarize()
+ self.assertIn('TIMEOUT', summary.unexpected_statuses)
+ self.assertEqual(1, summary.unexpected_statuses['TIMEOUT'])
+ self.assertIn('PASS', summary.expected_statuses)
+ self.assertEqual(1, summary.expected_statuses['PASS'])
+ self.assertIn('OK', summary.expected_statuses)
+ self.assertEqual(1, summary.expected_statuses['OK'])
+ self.assertEqual(2, summary.action_counts['test_status'])
+ self.assertEqual(1, summary.action_counts['test_end'])
+
+ def test_error_run(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.error("ERRR!")
+ self.logger.test_end("test1", status='OK')
+ self.logger.test_start("test2")
+ self.logger.test_end("test2", status='OK')
+ self.logger.suite_end()
+ summary = self.handler.summarize()
+ self.assertIn('ERROR', summary.log_level_counts)
+ self.assertEqual(1, summary.log_level_counts['ERROR'])
+ self.assertIn('OK', summary.expected_statuses)
+ self.assertEqual(2, summary.expected_statuses['OK'])
+
+
+class TestStructuredLog(BaseStructuredTest):
+
+ def test_suite_start(self):
+ self.logger.suite_start(["test"])
+ self.assert_log_equals({"action": "suite_start",
+ "tests": ["test"]})
+ self.logger.suite_end()
+
+ def test_suite_end(self):
+ self.logger.suite_start([])
+ self.logger.suite_end()
+ self.assert_log_equals({"action": "suite_end"})
+
+ def test_start(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.assert_log_equals({"action": "test_start",
+ "test": "test1"})
+
+ self.logger.test_start(
+ ("test1", "==", "test1-ref"), path="path/to/test")
+ self.assert_log_equals({"action": "test_start",
+ "test": ("test1", "==", "test1-ref"),
+ "path": "path/to/test"})
+ self.logger.suite_end()
+
+ def test_start_inprogress(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_start("test1")
+ self.assert_log_equals({"action": "log",
+ "message": "test_start for test1 logged while in progress.",
+ "level": "ERROR"})
+ self.logger.suite_end()
+
+ def test_status(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "subtest name", "fail", expected="FAIL",
+ message="Test message")
+ self.assert_log_equals({"action": "test_status",
+ "subtest": "subtest name",
+ "status": "FAIL",
+ "message": "Test message",
+ "test": "test1"})
+ self.logger.test_end("test1", "OK")
+ self.logger.suite_end()
+
+ def test_status_1(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "subtest name", "fail")
+ self.assert_log_equals({"action": "test_status",
+ "subtest": "subtest name",
+ "status": "FAIL",
+ "expected": "PASS",
+ "test": "test1"})
+ self.logger.test_end("test1", "OK")
+ self.logger.suite_end()
+
+ def test_status_2(self):
+ self.assertRaises(ValueError, self.logger.test_status, "test1", "subtest name",
+ "XXXUNKNOWNXXX")
+
+ def test_status_extra(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "subtest name", "FAIL", expected="PASS",
+ extra={"data": 42})
+ self.assert_log_equals({"action": "test_status",
+ "subtest": "subtest name",
+ "status": "FAIL",
+ "expected": "PASS",
+ "test": "test1",
+ "extra": {"data": 42}})
+ self.logger.test_end("test1", "OK")
+ self.logger.suite_end()
+
+ def test_status_stack(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "subtest name", "FAIL", expected="PASS",
+ stack="many\nlines\nof\nstack")
+ self.assert_log_equals({"action": "test_status",
+ "subtest": "subtest name",
+ "status": "FAIL",
+ "expected": "PASS",
+ "test": "test1",
+ "stack": "many\nlines\nof\nstack"})
+ self.logger.test_end("test1", "OK")
+ self.logger.suite_end()
+
+ def test_status_not_started(self):
+ self.logger.test_status("test_UNKNOWN", "subtest", "PASS")
+ self.assertTrue(self.pop_last_item()["message"].startswith(
+ "test_status for test_UNKNOWN logged while not in progress. Logged with data: {"))
+
+ def test_end(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_end("test1", "fail", message="Test message")
+ self.assert_log_equals({"action": "test_end",
+ "status": "FAIL",
+ "expected": "OK",
+ "message": "Test message",
+ "test": "test1"})
+ self.logger.suite_end()
+
+ def test_end_1(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_end(
+ "test1", "PASS", expected="PASS", extra={"data": 123})
+ self.assert_log_equals({"action": "test_end",
+ "status": "PASS",
+ "extra": {"data": 123},
+ "test": "test1"})
+ self.logger.suite_end()
+
+ def test_end_2(self):
+ self.assertRaises(ValueError, self.logger.test_end,
+ "test1", "XXXUNKNOWNXXX")
+
+ def test_end_stack(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_end("test1", "PASS", expected="PASS",
+ stack="many\nlines\nof\nstack")
+ self.assert_log_equals({"action": "test_end",
+ "status": "PASS",
+ "test": "test1",
+ "stack": "many\nlines\nof\nstack"})
+ self.logger.suite_end()
+
+ def test_end_no_start(self):
+ self.logger.test_end("test1", "PASS", expected="PASS")
+ self.assertTrue(self.pop_last_item()["message"].startswith(
+ "test_end for test1 logged while not in progress. Logged with data: {"))
+ self.logger.suite_end()
+
+ def test_end_twice(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test2")
+ self.logger.test_end("test2", "PASS", expected="PASS")
+ self.assert_log_equals({"action": "test_end",
+ "status": "PASS",
+ "test": "test2"})
+ self.logger.test_end("test2", "PASS", expected="PASS")
+ last_item = self.pop_last_item()
+ self.assertEquals(last_item["action"], "log")
+ self.assertEquals(last_item["level"], "ERROR")
+ self.assertTrue(last_item["message"].startswith(
+ "test_end for test2 logged while not in progress. Logged with data: {"))
+ self.logger.suite_end()
+
+ def test_suite_start_twice(self):
+ self.logger.suite_start([])
+ self.assert_log_equals({"action": "suite_start",
+ "tests": []})
+ self.logger.suite_start([])
+ last_item = self.pop_last_item()
+ self.assertEquals(last_item["action"], "log")
+ self.assertEquals(last_item["level"], "ERROR")
+ self.logger.suite_end()
+
+ def test_suite_end_no_start(self):
+ self.logger.suite_start([])
+ self.assert_log_equals({"action": "suite_start",
+ "tests": []})
+ self.logger.suite_end()
+ self.assert_log_equals({"action": "suite_end"})
+ self.logger.suite_end()
+ last_item = self.pop_last_item()
+ self.assertEquals(last_item["action"], "log")
+ self.assertEquals(last_item["level"], "ERROR")
+
+ def test_multiple_loggers_suite_start(self):
+ logger1 = structuredlog.StructuredLogger("test")
+ self.logger.suite_start([])
+ logger1.suite_start([])
+ last_item = self.pop_last_item()
+ self.assertEquals(last_item["action"], "log")
+ self.assertEquals(last_item["level"], "ERROR")
+
+ def test_multiple_loggers_test_start(self):
+ logger1 = structuredlog.StructuredLogger("test")
+ self.logger.suite_start([])
+ self.logger.test_start("test")
+ logger1.test_start("test")
+ last_item = self.pop_last_item()
+ self.assertEquals(last_item["action"], "log")
+ self.assertEquals(last_item["level"], "ERROR")
+
+ def test_process(self):
+ self.logger.process_output(1234, "test output")
+ self.assert_log_equals({"action": "process_output",
+ "process": "1234",
+ "data": "test output"})
+
+ def test_process_start(self):
+ self.logger.process_start(1234)
+ self.assert_log_equals({"action": "process_start",
+ "process": "1234"})
+
+ def test_process_exit(self):
+ self.logger.process_exit(1234, 0)
+ self.assert_log_equals({"action": "process_exit",
+ "process": "1234",
+ "exitcode": 0})
+
+ def test_log(self):
+ for level in ["critical", "error", "warning", "info", "debug"]:
+ getattr(self.logger, level)("message")
+ self.assert_log_equals({"action": "log",
+ "level": level.upper(),
+ "message": "message"})
+
+ def test_logging_adapter(self):
+ import logging
+ logging.basicConfig(level="DEBUG")
+ old_level = logging.root.getEffectiveLevel()
+ logging.root.setLevel("DEBUG")
+
+ std_logger = logging.getLogger("test")
+ std_logger.setLevel("DEBUG")
+
+ logger = stdadapter.std_logging_adapter(std_logger)
+
+ try:
+ for level in ["critical", "error", "warning", "info", "debug"]:
+ getattr(logger, level)("message")
+ self.assert_log_equals({"action": "log",
+ "level": level.upper(),
+ "message": "message"})
+ finally:
+ logging.root.setLevel(old_level)
+
+ def test_add_remove_handlers(self):
+ handler = TestHandler()
+ self.logger.add_handler(handler)
+ self.logger.info("test1")
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "test1"})
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "test1"}, actual=handler.last_item)
+
+ self.logger.remove_handler(handler)
+ self.logger.info("test2")
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "test2"})
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "test1"}, actual=handler.last_item)
+
+ def test_wrapper(self):
+ file_like = structuredlog.StructuredLogFileLike(self.logger)
+
+ file_like.write("line 1")
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "line 1"})
+
+ file_like.write("line 2\n")
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "line 2"})
+
+ file_like.write("line 3\r")
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "line 3"})
+
+ file_like.write("line 4\r\n")
+
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "line 4"})
+
+
+class TestTypeConversions(BaseStructuredTest):
+
+ def test_raw(self):
+ self.logger.log_raw({"action": "suite_start",
+ "tests": [1],
+ "time": "1234"})
+ self.assert_log_equals({"action": "suite_start",
+ "tests": ["1"],
+ "time": 1234})
+ self.logger.suite_end()
+
+ def test_tuple(self):
+ self.logger.suite_start([])
+ self.logger.test_start(("\xf0\x90\x8d\x84\xf0\x90\x8c\xb4\xf0\x90\x8d\x83\xf0\x90\x8d\x84",
+ 42, u"\u16a4"))
+ self.assert_log_equals({"action": "test_start",
+ "test": (u'\U00010344\U00010334\U00010343\U00010344',
+ u"42", u"\u16a4")})
+ self.logger.suite_end()
+
+ def test_non_string_messages(self):
+ self.logger.suite_start([])
+ self.logger.info(1)
+ self.assert_log_equals({"action": "log",
+ "message": "1",
+ "level": "INFO"})
+ self.logger.info([1, (2, '3'), "s", "s" + chr(255)])
+ self.assert_log_equals({"action": "log",
+ "message": "[1, (2, '3'), 's', 's\\xff']",
+ "level": "INFO"})
+ self.logger.suite_end()
+
+ def test_utf8str_write(self):
+ with mozfile.NamedTemporaryFile() as logfile:
+ _fmt = formatters.TbplFormatter()
+ _handler = handlers.StreamHandler(logfile, _fmt)
+ self.logger.add_handler(_handler)
+ self.logger.suite_start([])
+ self.logger.info("☺")
+ logfile.seek(0)
+ data = logfile.readlines()[-1].strip()
+ self.assertEquals(data, "☺")
+ self.logger.suite_end()
+
+ def test_arguments(self):
+ self.logger.info(message="test")
+ self.assert_log_equals({"action": "log",
+ "message": "test",
+ "level": "INFO"})
+
+ self.logger.suite_start([], {})
+ self.assert_log_equals({"action": "suite_start",
+ "tests": [],
+ "run_info": {}})
+ self.logger.test_start(test="test1")
+ self.logger.test_status(
+ "subtest1", "FAIL", test="test1", status="PASS")
+ self.assert_log_equals({"action": "test_status",
+ "test": "test1",
+ "subtest": "subtest1",
+ "status": "PASS",
+ "expected": "FAIL"})
+ self.logger.process_output(123, "data", "test")
+ self.assert_log_equals({"action": "process_output",
+ "process": "123",
+ "command": "test",
+ "data": "data"})
+ self.assertRaises(TypeError, self.logger.test_status, subtest="subtest2",
+ status="FAIL", expected="PASS")
+ self.assertRaises(TypeError, self.logger.test_status, "test1", "subtest1",
+ "PASS", "FAIL", "message", "stack", {}, "unexpected")
+ self.assertRaises(TypeError, self.logger.test_status,
+ "test1", test="test2")
+ self.logger.suite_end()
+
+
+class TestComponentFilter(BaseStructuredTest):
+
+ def test_filter_component(self):
+ component_logger = structuredlog.StructuredLogger(self.logger.name,
+ "test_component")
+ component_logger.component_filter = handlers.LogLevelFilter(
+ lambda x: x, "info")
+
+ self.logger.debug("Test")
+ self.assertFalse(self.handler.empty)
+ self.assert_log_equals({"action": "log",
+ "level": "DEBUG",
+ "message": "Test"})
+ self.assertTrue(self.handler.empty)
+
+ component_logger.info("Test 1")
+ self.assertFalse(self.handler.empty)
+ self.assert_log_equals({"action": "log",
+ "level": "INFO",
+ "message": "Test 1",
+ "component": "test_component"})
+
+ component_logger.debug("Test 2")
+ self.assertTrue(self.handler.empty)
+
+ component_logger.component_filter = None
+
+ component_logger.debug("Test 3")
+ self.assertFalse(self.handler.empty)
+ self.assert_log_equals({"action": "log",
+ "level": "DEBUG",
+ "message": "Test 3",
+ "component": "test_component"})
+
+ def test_filter_default_component(self):
+ component_logger = structuredlog.StructuredLogger(self.logger.name,
+ "test_component")
+
+ self.logger.debug("Test")
+ self.assertFalse(self.handler.empty)
+ self.assert_log_equals({"action": "log",
+ "level": "DEBUG",
+ "message": "Test"})
+
+ self.logger.component_filter = handlers.LogLevelFilter(
+ lambda x: x, "info")
+
+ self.logger.debug("Test 1")
+ self.assertTrue(self.handler.empty)
+
+ component_logger.debug("Test 2")
+ self.assertFalse(self.handler.empty)
+ self.assert_log_equals({"action": "log",
+ "level": "DEBUG",
+ "message": "Test 2",
+ "component": "test_component"})
+
+ self.logger.component_filter = None
+
+ self.logger.debug("Test 3")
+ self.assertFalse(self.handler.empty)
+ self.assert_log_equals({"action": "log",
+ "level": "DEBUG",
+ "message": "Test 3"})
+
+ def test_filter_message_mutuate(self):
+ def filter_mutate(msg):
+ if msg["action"] == "log":
+ msg["message"] = "FILTERED! %s" % msg["message"]
+ return msg
+
+ self.logger.component_filter = filter_mutate
+ self.logger.debug("Test")
+ self.assert_log_equals({"action": "log",
+ "level": "DEBUG",
+ "message": "FILTERED! Test"})
+ self.logger.component_filter = None
+
+
+class FormatterTest(unittest.TestCase):
+
+ def setUp(self):
+ self.position = 0
+ self.logger = structuredlog.StructuredLogger(
+ "test_%s" % type(self).__name__)
+ self.output_file = StringIO.StringIO()
+ self.handler = handlers.StreamHandler(
+ self.output_file, self.get_formatter())
+ self.logger.add_handler(self.handler)
+
+ def set_position(self, pos=None):
+ if pos is None:
+ pos = self.output_file.tell()
+ self.position = pos
+
+ def get_formatter(self):
+ raise NotImplementedError(
+ "FormatterTest subclasses must implement get_formatter")
+
+ @property
+ def loglines(self):
+ self.output_file.seek(self.position)
+ return [line.rstrip() for line in self.output_file.readlines()]
+
+
+class TestHTMLFormatter(FormatterTest):
+
+ def get_formatter(self):
+ return formatters.HTMLFormatter()
+
+ def test_base64_string(self):
+ self.logger.suite_start([])
+ self.logger.test_start("string_test")
+ self.logger.test_end("string_test", "FAIL",
+ extra={"data": "foobar"})
+ self.logger.suite_end()
+ self.assertIn("data:text/html;charset=utf-8;base64,Zm9vYmFy",
+ ''.join(self.loglines))
+
+ def test_base64_unicode(self):
+ self.logger.suite_start([])
+ self.logger.test_start("unicode_test")
+ self.logger.test_end("unicode_test", "FAIL",
+ extra={"data": unichr(0x02A9)})
+ self.logger.suite_end()
+ self.assertIn("data:text/html;charset=utf-8;base64,yqk=",
+ ''.join(self.loglines))
+
+ def test_base64_other(self):
+ self.logger.suite_start([])
+ self.logger.test_start("int_test")
+ self.logger.test_end("int_test", "FAIL",
+ extra={"data": {"foo": "bar"}})
+ self.logger.suite_end()
+ self.assertIn("data:text/html;charset=utf-8;base64,eydmb28nOiAnYmFyJ30=",
+ ''.join(self.loglines))
+
+
+class TestTBPLFormatter(FormatterTest):
+
+ def get_formatter(self):
+ return formatters.TbplFormatter()
+
+ def test_unexpected_message(self):
+ self.logger.suite_start([])
+ self.logger.test_start("timeout_test")
+ self.logger.test_end("timeout_test",
+ "TIMEOUT",
+ message="timed out")
+ self.assertIn("TEST-UNEXPECTED-TIMEOUT | timeout_test | timed out",
+ self.loglines)
+ self.logger.suite_end()
+
+ def test_default_unexpected_end_message(self):
+ self.logger.suite_start([])
+ self.logger.test_start("timeout_test")
+ self.logger.test_end("timeout_test",
+ "TIMEOUT")
+ self.assertIn("TEST-UNEXPECTED-TIMEOUT | timeout_test | expected OK",
+ self.loglines)
+ self.logger.suite_end()
+
+ def test_default_unexpected_status_message(self):
+ self.logger.suite_start([])
+ self.logger.test_start("timeout_test")
+ self.logger.test_status("timeout_test",
+ "subtest",
+ status="TIMEOUT")
+ self.assertIn("TEST-UNEXPECTED-TIMEOUT | timeout_test | subtest - expected PASS",
+ self.loglines)
+ self.logger.test_end("timeout_test", "OK")
+ self.logger.suite_end()
+
+ def test_single_newline(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.set_position()
+ self.logger.test_status("test1", "subtest",
+ status="PASS",
+ expected="FAIL")
+ self.logger.test_end("test1", "OK")
+ self.logger.suite_end()
+
+        # This sequence should not produce blank lines.
+ for line in self.loglines:
+ self.assertNotEqual("", line, "No blank line should be present in: %s" %
+ self.loglines)
+
+ def test_process_exit(self):
+ self.logger.process_exit(1234, 0)
+ self.assertIn('TEST-INFO | 1234: exit 0', self.loglines)
+
+ @unittest.skipUnless(os.name == 'posix', 'posix only')
+ def test_process_exit_with_sig(self):
+ # subprocess return code is negative when process
+ # has been killed by signal on posix.
+ self.logger.process_exit(1234, -signal.SIGTERM)
+        self.assertIn('TEST-INFO | 1234: killed by SIGTERM', self.loglines)
+
+
+class TestMachFormatter(FormatterTest):
+
+ def get_formatter(self):
+ return formatters.MachFormatter(disable_colors=True)
+
+ def test_summary(self):
+ self.logger.suite_start([])
+
+ # Some tests that pass
+ self.logger.test_start("test1")
+ self.logger.test_end("test1", status="PASS", expected="PASS")
+
+ self.logger.test_start("test2")
+ self.logger.test_end("test2", status="PASS", expected="TIMEOUT")
+
+ self.logger.test_start("test3")
+ self.logger.test_end("test3", status="FAIL", expected="PASS")
+
+ self.set_position()
+ self.logger.suite_end()
+
+ self.assertIn("Ran 3 tests", self.loglines)
+ self.assertIn("Expected results: 1", self.loglines)
+ self.assertIn(
+ "Unexpected results: 2 (FAIL: 1, PASS: 1)", self.loglines)
+ self.assertNotIn("test1", self.loglines)
+ self.assertIn("PASS expected TIMEOUT test2", self.loglines)
+ self.assertIn("FAIL test3", self.loglines)
+
+ def test_summary_subtests(self):
+ self.logger.suite_start([])
+
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "subtest1", status="PASS")
+ self.logger.test_status("test1", "subtest2", status="FAIL")
+ self.logger.test_end("test1", status="OK", expected="OK")
+
+ self.logger.test_start("test2")
+ self.logger.test_status("test2", "subtest1",
+ status="TIMEOUT", expected="PASS")
+ self.logger.test_end("test2", status="TIMEOUT", expected="OK")
+
+ self.set_position()
+ self.logger.suite_end()
+
+ self.assertIn("Ran 5 tests (2 parents, 3 subtests)", self.loglines)
+ self.assertIn("Expected results: 2", self.loglines)
+ self.assertIn(
+ "Unexpected results: 3 (FAIL: 1, TIMEOUT: 2)", self.loglines)
+
+ def test_summary_ok(self):
+ self.logger.suite_start([])
+
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "subtest1", status="PASS")
+ self.logger.test_status("test1", "subtest2", status="PASS")
+ self.logger.test_end("test1", status="OK", expected="OK")
+
+ self.logger.test_start("test2")
+ self.logger.test_status("test2", "subtest1",
+ status="PASS", expected="PASS")
+ self.logger.test_end("test2", status="OK", expected="OK")
+
+ self.set_position()
+ self.logger.suite_end()
+
+ self.assertIn("OK", self.loglines)
+ self.assertIn("Expected results: 5", self.loglines)
+ self.assertIn("Unexpected results: 0", self.loglines)
+
+ def test_process_start(self):
+ self.logger.process_start(1234)
+ self.assertIn("Started process `1234`", self.loglines[0])
+
+ def test_process_start_with_command(self):
+ self.logger.process_start(1234, command='test cmd')
+ self.assertIn("Started process `1234` (test cmd)", self.loglines[0])
+
+ def test_process_exit(self):
+ self.logger.process_exit(1234, 0)
+ self.assertIn('1234: exit 0', self.loglines[0])
+
+ @unittest.skipUnless(os.name == 'posix', 'posix only')
+ def test_process_exit_with_sig(self):
+ # subprocess return code is negative when process
+ # has been killed by signal on posix.
+ self.logger.process_exit(1234, -signal.SIGTERM)
+ self.assertIn('1234: killed by SIGTERM', self.loglines[0])
+
+
+class TestXUnitFormatter(FormatterTest):
+
+ def get_formatter(self):
+ return formatters.XUnitFormatter()
+
+ def log_as_xml(self):
+ return ET.fromstring('\n'.join(self.loglines))
+
+ def test_stacktrace_is_present(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_end(
+ "test1", "fail", message="Test message", stack='this\nis\na\nstack')
+ self.logger.suite_end()
+
+ root = self.log_as_xml()
+ self.assertIn('this\nis\na\nstack', root.find('testcase/failure').text)
+
+ def test_failure_message(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_end("test1", "fail", message="Test message")
+ self.logger.suite_end()
+
+ root = self.log_as_xml()
+ self.assertEquals('Expected OK, got FAIL', root.find(
+ 'testcase/failure').get('message'))
+
+ def test_suite_attrs(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_end("test1", "ok", message="Test message")
+ self.logger.suite_end()
+
+ root = self.log_as_xml()
+ self.assertEqual(root.get('skips'), '0')
+ self.assertEqual(root.get('failures'), '0')
+ self.assertEqual(root.get('errors'), '0')
+ self.assertEqual(root.get('tests'), '1')
+ self.assertEqual(root.get('time'), '0.00')
+
+ def test_time_is_not_rounded(self):
+ # call formatter directly, it is easier here
+ formatter = self.get_formatter()
+ formatter.suite_start(dict(time=55000))
+ formatter.test_start(dict(time=55100))
+ formatter.test_end(
+ dict(time=55558, test='id', message='message', status='PASS'))
+ xml_string = formatter.suite_end(dict(time=55559))
+
+ root = ET.fromstring(xml_string)
+ self.assertEqual(root.get('time'), '0.56')
+ self.assertEqual(root.find('testcase').get('time'), '0.46')
+
+
+class TestCommandline(unittest.TestCase):
+
+ def setUp(self):
+ self.logfile = mozfile.NamedTemporaryFile()
+
+ @property
+ def loglines(self):
+ self.logfile.seek(0)
+ return [line.rstrip() for line in self.logfile.readlines()]
+
+ def test_setup_logging(self):
+ parser = argparse.ArgumentParser()
+ commandline.add_logging_group(parser)
+ args = parser.parse_args(["--log-raw=-"])
+ logger = commandline.setup_logging("test_setup_logging", args, {})
+ self.assertEqual(len(logger.handlers), 1)
+
+ def test_setup_logging_optparse(self):
+ parser = optparse.OptionParser()
+ commandline.add_logging_group(parser)
+ args, _ = parser.parse_args(["--log-raw=-"])
+ logger = commandline.setup_logging("test_optparse", args, {})
+ self.assertEqual(len(logger.handlers), 1)
+ self.assertIsInstance(logger.handlers[0], handlers.StreamHandler)
+
+ def test_limit_formatters(self):
+ parser = argparse.ArgumentParser()
+ commandline.add_logging_group(parser, include_formatters=['raw'])
+ other_formatters = [fmt for fmt in commandline.log_formatters
+ if fmt != 'raw']
+ # check that every formatter except raw is not present
+ for fmt in other_formatters:
+ with self.assertRaises(SystemExit):
+ parser.parse_args(["--log-%s=-" % fmt])
+ with self.assertRaises(SystemExit):
+ parser.parse_args(["--log-%s-level=error" % fmt])
+ # raw is still ok
+ args = parser.parse_args(["--log-raw=-"])
+ logger = commandline.setup_logging("test_setup_logging2", args, {})
+ self.assertEqual(len(logger.handlers), 1)
+
+ def test_setup_logging_optparse_unicode(self):
+ parser = optparse.OptionParser()
+ commandline.add_logging_group(parser)
+ args, _ = parser.parse_args([u"--log-raw=-"])
+ logger = commandline.setup_logging("test_optparse_unicode", args, {})
+ self.assertEqual(len(logger.handlers), 1)
+ self.assertEqual(logger.handlers[0].stream, sys.stdout)
+ self.assertIsInstance(logger.handlers[0], handlers.StreamHandler)
+
+ def test_logging_defaultlevel(self):
+ parser = argparse.ArgumentParser()
+ commandline.add_logging_group(parser)
+
+ args = parser.parse_args(["--log-tbpl=%s" % self.logfile.name])
+ logger = commandline.setup_logging("test_fmtopts", args, {})
+ logger.info("INFO message")
+ logger.debug("DEBUG message")
+ logger.error("ERROR message")
+ # The debug level is not logged by default.
+ self.assertEqual(["INFO message",
+ "ERROR message"],
+ self.loglines)
+
+ def test_logging_errorlevel(self):
+ parser = argparse.ArgumentParser()
+ commandline.add_logging_group(parser)
+ args = parser.parse_args(
+ ["--log-tbpl=%s" % self.logfile.name, "--log-tbpl-level=error"])
+ logger = commandline.setup_logging("test_fmtopts", args, {})
+ logger.info("INFO message")
+ logger.debug("DEBUG message")
+ logger.error("ERROR message")
+
+ # Only the error level and above were requested.
+ self.assertEqual(["ERROR message"],
+ self.loglines)
+
+ def test_logging_debuglevel(self):
+ parser = argparse.ArgumentParser()
+ commandline.add_logging_group(parser)
+ args = parser.parse_args(
+ ["--log-tbpl=%s" % self.logfile.name, "--log-tbpl-level=debug"])
+ logger = commandline.setup_logging("test_fmtopts", args, {})
+ logger.info("INFO message")
+ logger.debug("DEBUG message")
+ logger.error("ERROR message")
+ # Requesting a lower log level than default works as expected.
+ self.assertEqual(["INFO message",
+ "DEBUG message",
+ "ERROR message"],
+ self.loglines)
+
+ def test_unused_options(self):
+ parser = argparse.ArgumentParser()
+ commandline.add_logging_group(parser)
+ args = parser.parse_args(["--log-tbpl-level=error"])
+ self.assertRaises(ValueError, commandline.setup_logging,
+ "test_fmtopts", args, {})
+
+
+class TestBuffer(BaseStructuredTest):
+
+ def assert_log_equals(self, expected, actual=None):
+ if actual is None:
+ actual = self.pop_last_item()
+
+ all_expected = {"pid": os.getpid(),
+ "thread": "MainThread",
+ "source": "testBuffer"}
+ specials = set(["time"])
+
+ all_expected.update(expected)
+ for key, value in all_expected.iteritems():
+ self.assertEqual(actual[key], value)
+
+ self.assertEquals(set(all_expected.keys()) |
+ specials, set(actual.keys()))
+
+ def setUp(self):
+ self.logger = structuredlog.StructuredLogger("testBuffer")
+ self.handler = handlers.BufferHandler(TestHandler(), message_limit=4)
+ self.logger.add_handler(self.handler)
+
+ def tearDown(self):
+ self.logger.remove_handler(self.handler)
+
+ def pop_last_item(self):
+ return self.handler.inner.items.pop()
+
+ def test_buffer_messages(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.send_message("buffer", "off")
+ self.logger.test_status("test1", "sub1", status="PASS")
+ # Even for buffered actions, the buffer does not interfere if
+ # buffering is turned off.
+ self.assert_log_equals({"action": "test_status",
+ "test": "test1",
+ "status": "PASS",
+ "subtest": "sub1"})
+ self.logger.send_message("buffer", "on")
+ self.logger.test_status("test1", "sub2", status="PASS")
+ self.logger.test_status("test1", "sub3", status="PASS")
+ self.logger.test_status("test1", "sub4", status="PASS")
+ self.logger.test_status("test1", "sub5", status="PASS")
+ self.logger.test_status("test1", "sub6", status="PASS")
+ self.logger.test_status("test1", "sub7", status="PASS")
+ self.logger.test_end("test1", status="OK")
+ self.logger.send_message("buffer", "clear")
+ self.assert_log_equals({"action": "test_end",
+ "test": "test1",
+ "status": "OK"})
+ self.logger.suite_end()
+
+ def test_buffer_size(self):
+ self.logger.suite_start([])
+ self.logger.test_start("test1")
+ self.logger.test_status("test1", "sub1", status="PASS")
+ self.logger.test_status("test1", "sub2", status="PASS")
+ self.logger.test_status("test1", "sub3", status="PASS")
+ self.logger.test_status("test1", "sub4", status="PASS")
+ self.logger.test_status("test1", "sub5", status="PASS")
+ self.logger.test_status("test1", "sub6", status="PASS")
+ self.logger.test_status("test1", "sub7", status="PASS")
+
+ # No test status messages made it to the underlying handler.
+ self.assert_log_equals({"action": "test_start",
+ "test": "test1"})
+
+ # The buffer's actual size never grows beyond the specified limit.
+ self.assertEquals(len(self.handler._buffer), 4)
+
+ self.logger.test_status("test1", "sub8", status="FAIL")
+ # The number of messages deleted comes back in a list.
+ self.assertEquals([4], self.logger.send_message("buffer", "flush"))
+
+ # When the buffer is dumped, the failure is the last thing logged
+ self.assert_log_equals({"action": "test_status",
+ "test": "test1",
+ "subtest": "sub8",
+ "status": "FAIL",
+ "expected": "PASS"})
+ # Three additional messages should have been retained for context
+ self.assert_log_equals({"action": "test_status",
+ "test": "test1",
+ "status": "PASS",
+ "subtest": "sub7"})
+ self.assert_log_equals({"action": "test_status",
+ "test": "test1",
+ "status": "PASS",
+ "subtest": "sub6"})
+ self.assert_log_equals({"action": "test_status",
+ "test": "test1",
+ "status": "PASS",
+ "subtest": "sub5"})
+ self.assert_log_equals({"action": "suite_start",
+ "tests": []})
+
+
+class TestReader(unittest.TestCase):
+
+ def to_file_like(self, obj):
+ data_str = "\n".join(json.dumps(item) for item in obj)
+ return StringIO.StringIO(data_str)
+
+ def test_read(self):
+ data = [{"action": "action_0", "data": "data_0"},
+ {"action": "action_1", "data": "data_1"}]
+
+ f = self.to_file_like(data)
+ self.assertEquals(data, list(reader.read(f)))
+
+ def test_imap_log(self):
+ data = [{"action": "action_0", "data": "data_0"},
+ {"action": "action_1", "data": "data_1"}]
+
+ f = self.to_file_like(data)
+
+ def f_action_0(item):
+ return ("action_0", item["data"])
+
+ def f_action_1(item):
+ return ("action_1", item["data"])
+
+ res_iter = reader.imap_log(reader.read(f),
+ {"action_0": f_action_0,
+ "action_1": f_action_1})
+ self.assertEquals([("action_0", "data_0"), ("action_1", "data_1")],
+ list(res_iter))
+
+ def test_each_log(self):
+ data = [{"action": "action_0", "data": "data_0"},
+ {"action": "action_1", "data": "data_1"}]
+
+ f = self.to_file_like(data)
+
+ count = {"action_0": 0,
+ "action_1": 0}
+
+ def f_action_0(item):
+ count[item["action"]] += 1
+
+ def f_action_1(item):
+ count[item["action"]] += 2
+
+ reader.each_log(reader.read(f),
+ {"action_0": f_action_0,
+ "action_1": f_action_1})
+
+ self.assertEquals({"action_0": 1, "action_1": 2}, count)
+
+ def test_handler(self):
+ data = [{"action": "action_0", "data": "data_0"},
+ {"action": "action_1", "data": "data_1"}]
+
+ f = self.to_file_like(data)
+
+ test = self
+
+ class ReaderTestHandler(reader.LogHandler):
+
+ def __init__(self):
+ self.action_0_count = 0
+ self.action_1_count = 0
+
+ def action_0(self, item):
+ test.assertEquals(item["action"], "action_0")
+ self.action_0_count += 1
+
+ def action_1(self, item):
+ test.assertEquals(item["action"], "action_1")
+ self.action_1_count += 1
+
+ handler = ReaderTestHandler()
+ reader.handle_log(reader.read(f), handler)
+
+ self.assertEquals(handler.action_0_count, 1)
+ self.assertEquals(handler.action_1_count, 1)
+
+if __name__ == "__main__":
+ unittest.main()
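
The TestReader cases above exercise the mozlog.reader helpers (read, imap_log, each_log and LogHandler/handle_log). As an illustration of how a consumer might apply the same API outside the test harness, here is a minimal sketch that summarises test statuses from a raw structured log; the file name "structured.log" and the counting logic are assumptions for the example, not part of this patch::

    # Sketch: count test_status results in a raw structured log produced
    # with --log-raw. "structured.log" is a hypothetical file name.
    from mozlog import reader

    class SummaryHandler(reader.LogHandler):

        def __init__(self):
            self.status_counts = {}

        def test_status(self, item):
            # handle_log dispatches each message to the method named after
            # its "action" field, as the tests above demonstrate.
            status = item["status"]
            self.status_counts[status] = self.status_counts.get(status, 0) + 1

    handler = SummaryHandler()
    with open("structured.log") as f:
        reader.handle_log(reader.read(f), handler)
    print(handler.status_counts)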
diff --git a/testing/mozbase/moznetwork/moznetwork/__init__.py b/testing/mozbase/moznetwork/moznetwork/__init__.py
new file mode 100644
index 000000000..df2097cb0
--- /dev/null
+++ b/testing/mozbase/moznetwork/moznetwork/__init__.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+moznetwork is a very simple module designed for one task: getting the
+network address of the current machine.
+
+Example usage:
+
+::
+
+ import moznetwork
+
+ try:
+ ip = moznetwork.get_ip()
+ print "The external IP of your machine is '%s'" % ip
+ except moznetwork.NetworkError:
+ print "Unable to determine IP address of machine"
+ raise
+
+"""
+
+from moznetwork import get_ip
+
+__all__ = ['get_ip']
diff --git a/testing/mozbase/moznetwork/moznetwork/moznetwork.py b/testing/mozbase/moznetwork/moznetwork/moznetwork.py
new file mode 100644
index 000000000..537649603
--- /dev/null
+++ b/testing/mozbase/moznetwork/moznetwork/moznetwork.py
@@ -0,0 +1,172 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import array
+import re
+import socket
+import struct
+import subprocess
+import sys
+
+import mozinfo
+import mozlog
+
+if mozinfo.isLinux:
+ import fcntl
+
+
+class NetworkError(Exception):
+ """Exception thrown when unable to obtain interface or IP."""
+
+
+def _get_logger():
+ logger = mozlog.get_default_logger(component='moznetwork')
+ if not logger:
+ logger = mozlog.unstructured.getLogger('moznetwork')
+ return logger
+
+
+def _get_interface_list():
+ """Provides a list of available network interfaces
+ as a list of tuples (name, ip)"""
+ logger = _get_logger()
+ logger.debug('Gathering interface list')
+ max_iface = 32 # Maximum number of interfaces (arbitrary)
+ bytes = max_iface * 32
+ is_32bit = (8 * struct.calcsize("P")) == 32 # Set Architecture
+ struct_size = 32 if is_32bit else 40
+
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ names = array.array('B', '\0' * bytes)
+ outbytes = struct.unpack('iL', fcntl.ioctl(
+ s.fileno(),
+ 0x8912, # SIOCGIFCONF
+ struct.pack('iL', bytes, names.buffer_info()[0])
+ ))[0]
+ namestr = names.tostring()
+ return [(namestr[i:i + 32].split('\0', 1)[0],
+ socket.inet_ntoa(namestr[i + 20:i + 24]))
+ for i in range(0, outbytes, struct_size)]
+
+ except IOError:
+ raise NetworkError('Unable to call ioctl with SIOCGIFCONF')
+
+
+def _proc_matches(args, regex):
+ """Helper returns the matches of regex in the output of a process created with
+ the given arguments"""
+ output = subprocess.Popen(args=args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT).stdout.read()
+ return re.findall(regex, output)
+
+
+def _parse_ifconfig():
+ """Parse the output of running ifconfig on mac in cases other methods
+ have failed"""
+ logger = _get_logger()
+ logger.debug('Parsing ifconfig')
+
+ # Attempt to determine the default interface in use.
+ default_iface = _proc_matches(['route', '-n', 'get', 'default'],
+ 'interface: (\w+)')
+
+ if default_iface:
+ addr_list = _proc_matches(['ifconfig', default_iface[0]],
+ 'inet (\d+.\d+.\d+.\d+)')
+ if addr_list:
+ logger.debug('Default interface: [%s] %s' % (default_iface[0],
+ addr_list[0]))
+ if not addr_list[0].startswith('127.'):
+ return addr_list[0]
+
+ # Iterate over plausible interfaces if we didn't find a suitable default.
+ for iface in ['en%s' % i for i in range(10)]:
+ addr_list = _proc_matches(['ifconfig', iface],
+ 'inet (\d+.\d+.\d+.\d+)')
+ if addr_list:
+ logger.debug('Interface: [%s] %s' % (iface, addr_list[0]))
+ if not addr_list[0].startswith('127.'):
+ return addr_list[0]
+
+ # Just return any that isn't localhost. If we can't find one, we have
+ # failed.
+ addrs = _proc_matches(['ifconfig'],
+ 'inet (\d+.\d+.\d+.\d+)')
+ try:
+ return [addr for addr in addrs if not addr.startswith('127.')][0]
+ except IndexError:
+ return None
+
+
+def get_ip():
+ """Provides an available network interface address, for example
+ "192.168.1.3".
+
+ A `NetworkError` exception is raised in case of failure."""
+ logger = _get_logger()
+ try:
+ hostname = socket.gethostname()
+ try:
+ logger.debug('Retrieving IP for %s' % hostname)
+ ips = socket.gethostbyname_ex(hostname)[2]
+ except socket.gaierror: # for Mac OS X
+ hostname += '.local'
+ logger.debug('Retrieving IP for %s' % hostname)
+ ips = socket.gethostbyname_ex(hostname)[2]
+ if len(ips) == 1:
+ ip = ips[0]
+ elif len(ips) > 1:
+ logger.debug('Multiple addresses found: %s' % ips)
+ # no fallback on Windows so take the first address
+ ip = ips[0] if mozinfo.isWin else None
+ else:
+ ip = None
+ except socket.gaierror:
+ # sometimes the hostname doesn't resolve to an ip address, in which
+ # case this will always fail
+ ip = None
+
+ if ip is None or ip.startswith("127."):
+ if mozinfo.isLinux:
+ interfaces = _get_interface_list()
+ for ifconfig in interfaces:
+ logger.debug('Interface: [%s] %s' % (ifconfig[0], ifconfig[1]))
+ if ifconfig[0] == 'lo':
+ continue
+ else:
+ return ifconfig[1]
+ elif mozinfo.isMac:
+ ip = _parse_ifconfig()
+
+ if ip is None:
+ raise NetworkError('Unable to obtain network address')
+
+ return ip
+
+
+def get_lan_ip():
+ """Deprecated. Please use get_ip() instead."""
+ return get_ip()
+
+
+def cli(args=sys.argv[1:]):
+ parser = argparse.ArgumentParser(
+ description='Retrieve IP address')
+ mozlog.commandline.add_logging_group(
+ parser,
+ include_formatters=mozlog.commandline.TEXT_FORMATTERS
+ )
+
+ args = parser.parse_args()
+ mozlog.commandline.setup_logging(
+ 'moznetwork', args, {'mach': sys.stdout})
+
+ _get_logger().info('IP address: %s' % get_ip())
+
+
+if __name__ == '__main__':
+ cli()
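
get_ip() logs what it is doing through mozlog's default logger when one is available (see _get_logger above). The following is a minimal sketch of programmatic use that sets such a logger up first, mirroring the mozlog.commandline pattern cli() uses; the logger name "myharness" is an arbitrary choice for the example::

    # Sketch: resolve the local IP with a mozlog default logger in place.
    # "myharness" is an arbitrary logger name for the example.
    import argparse
    import sys

    import mozlog
    import moznetwork

    parser = argparse.ArgumentParser()
    mozlog.commandline.add_logging_group(parser)
    args = parser.parse_args([])

    # As in cli() above, fall back to the mach formatter on stdout when no
    # --log-* option is passed; this also becomes mozlog's default logger,
    # so _get_logger() uses it instead of the unstructured fallback.
    mozlog.commandline.setup_logging('myharness', args, {'mach': sys.stdout})

    try:
        print('IP address: %s' % moznetwork.get_ip())
    except moznetwork.NetworkError:
        print('Unable to determine the IP address')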
diff --git a/testing/mozbase/moznetwork/setup.py b/testing/mozbase/moznetwork/setup.py
new file mode 100644
index 000000000..2bc62f8dc
--- /dev/null
+++ b/testing/mozbase/moznetwork/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = '0.27'
+
+deps = ['mozinfo',
+ 'mozlog >= 3.0',
+ ]
+
+setup(name='moznetwork',
+ version=PACKAGE_VERSION,
+ description="Library of network utilities for use in Mozilla testing",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['moznetwork'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ entry_points={'console_scripts': [
+ 'moznetwork = moznetwork:cli']},
+ )
diff --git a/testing/mozbase/moznetwork/tests/manifest.ini b/testing/mozbase/moznetwork/tests/manifest.ini
new file mode 100644
index 000000000..528fdea7b
--- /dev/null
+++ b/testing/mozbase/moznetwork/tests/manifest.ini
@@ -0,0 +1 @@
+[test.py]
diff --git a/testing/mozbase/moznetwork/tests/test.py b/testing/mozbase/moznetwork/tests/test.py
new file mode 100644
index 000000000..79eee6b03
--- /dev/null
+++ b/testing/mozbase/moznetwork/tests/test.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+Unit-Tests for moznetwork
+"""
+
+import os
+import mock
+import mozinfo
+import moznetwork
+import re
+import subprocess
+import unittest
+
+
+def verify_ip_in_list(ip):
+ """
+ Helper method to check if `ip` is listed in the network addresses
+ returned by ipconfig/ifconfig, depending on the platform in use
+
+ :param ip: IPv4 address in the xxx.xxx.xxx.xxx format as a string
+ Example Usage:
+ verify_ip_in_list('192.168.0.1')
+
+ returns True if the `ip` is in the list of IPs in ipconfig/ifconfig
+ """
+
+ # Regex to match IPv4 addresses.
+ # 0-255.0-255.0-255.0-255, note order is important here.
+ regexip = re.compile("((25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)\.){3}"
+ "(25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)")
+
+ if mozinfo.isLinux or mozinfo.isMac or mozinfo.isBsd:
+ # if "/sbin/ifconfig" exist, use it because it may not be in the
+ # PATH (at least on some linux platforms)
+ if os.path.isfile('/sbin/ifconfig') and os.access('/sbin/ifconfig',
+ os.X_OK):
+ args = ['/sbin/ifconfig']
+ else:
+ args = ["ifconfig"]
+
+ if mozinfo.isWin:
+ args = ["ipconfig"]
+
+ ps = subprocess.Popen(args, stdout=subprocess.PIPE)
+ standardoutput, standarderror = ps.communicate()
+
+ # Generate a list of IPs by parsing the output of ip/ifconfig
+ ip_list = [x.group() for x in re.finditer(regexip, standardoutput)]
+
+ # Check if ip is in list
+ if ip in ip_list:
+ return True
+ else:
+ return False
+
+
+class TestGetIP(unittest.TestCase):
+
+ def test_get_ip(self):
+ """ Attempt to test the IP address returned by
+ moznetwork.get_ip() is valid """
+
+ ip = moznetwork.get_ip()
+
+ # Check the IP returned by moznetwork is in the list
+ self.assertTrue(verify_ip_in_list(ip))
+
+ def test_get_ip_using_get_interface(self):
+ """ Test that the control flow path for get_ip() using
+ _get_interface_list() works """
+
+ if mozinfo.isLinux or mozinfo.isMac:
+
+ with mock.patch('socket.gethostbyname') as byname:
+ # Force socket.gethostbyname to return None
+ byname.return_value = None
+
+ ip = moznetwork.get_ip()
+
+ # Check the IP returned by moznetwork is in the list
+ self.assertTrue(verify_ip_in_list(ip))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/mozprocess/__init__.py b/testing/mozbase/mozprocess/mozprocess/__init__.py
new file mode 100644
index 000000000..0b238c2b2
--- /dev/null
+++ b/testing/mozbase/mozprocess/mozprocess/__init__.py
@@ -0,0 +1,8 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from .processhandler import *
diff --git a/testing/mozbase/mozprocess/mozprocess/processhandler.py b/testing/mozbase/mozprocess/mozprocess/processhandler.py
new file mode 100644
index 000000000..661a7820e
--- /dev/null
+++ b/testing/mozbase/mozprocess/mozprocess/processhandler.py
@@ -0,0 +1,1079 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import os
+import signal
+import subprocess
+import sys
+import threading
+import time
+import traceback
+from Queue import Queue, Empty
+from datetime import datetime
+
+__all__ = ['ProcessHandlerMixin', 'ProcessHandler', 'LogOutput',
+ 'StoreOutput', 'StreamOutput']
+
+# Set the MOZPROCESS_DEBUG environment variable to 1 to see some debugging output
+MOZPROCESS_DEBUG = os.getenv("MOZPROCESS_DEBUG")
+
+# We don't use mozinfo because it is expensive to import, see bug 933558.
+isWin = os.name == "nt"
+isPosix = os.name == "posix" # includes MacOS X
+
+if isWin:
+ from ctypes import sizeof, addressof, c_ulong, byref, WinError, c_longlong
+ from . import winprocess
+ from .qijo import JobObjectAssociateCompletionPortInformation,\
+ JOBOBJECT_ASSOCIATE_COMPLETION_PORT, JobObjectExtendedLimitInformation,\
+ JOBOBJECT_BASIC_LIMIT_INFORMATION, JOBOBJECT_EXTENDED_LIMIT_INFORMATION, IO_COUNTERS
+
+
+class ProcessHandlerMixin(object):
+ """
+ A class for launching and manipulating local processes.
+
+ :param cmd: command to run. May be a string or a list. If specified as a list, the first
+ element will be interpreted as the command, and all additional elements will be interpreted
+ as arguments to that command.
+ :param args: list of arguments to pass to the command (defaults to None). Must not be set when
+ `cmd` is specified as a list.
+ :param cwd: working directory for command (defaults to None).
+ :param env: is the environment to use for the process (defaults to os.environ).
+ :param ignore_children: causes system to ignore child processes when True,
+ defaults to False (which tracks child processes).
+ :param kill_on_timeout: when True, the process will be killed when a timeout is reached.
+ When False, the caller is responsible for killing the process.
+ Failure to do so could cause a call to wait() to hang indefinitely. (Defaults to True.)
+ :param processOutputLine: function or list of functions to be called for
+ each line of output produced by the process (defaults to an empty
+ list).
+ :param processStderrLine: function or list of functions to be called
+ for each line of error output - stderr - produced by the process
+ (defaults to an empty list). If this is not specified, stderr lines
+ will be sent to the *processOutputLine* callbacks.
+ :param onTimeout: function or list of functions to be called when the process times out.
+ :param onFinish: function or list of functions to be called when the process terminates
+ normally without timing out.
+ :param kwargs: additional keyword args to pass directly into Popen.
+
+ NOTE: Child processes will be tracked by default. If for any reason
+ we are unable to track child processes and ignore_children is set to False,
+ then we will fall back to only tracking the root process. The fallback
+ will be logged.
+ """
+
+ class Process(subprocess.Popen):
+ """
+ Represents our view of a subprocess.
+ It adds a kill() method which allows it to be stopped explicitly.
+ """
+
+ MAX_IOCOMPLETION_PORT_NOTIFICATION_DELAY = 180
+ MAX_PROCESS_KILL_DELAY = 30
+ TIMEOUT_BEFORE_SIGKILL = 1.0
+
+ def __init__(self,
+ args,
+ bufsize=0,
+ executable=None,
+ stdin=None,
+ stdout=None,
+ stderr=None,
+ preexec_fn=None,
+ close_fds=False,
+ shell=False,
+ cwd=None,
+ env=None,
+ universal_newlines=False,
+ startupinfo=None,
+ creationflags=0,
+ ignore_children=False):
+
+ # Parameter for whether or not we should attempt to track child processes
+ self._ignore_children = ignore_children
+
+ if not self._ignore_children and not isWin:
+ # Set the process group id for linux systems
+ # Sets process group id to the pid of the parent process
+ # NOTE: This prevents you from using preexec_fn and managing
+ # child processes. TODO: Ideally, find a way around this
+ def setpgidfn():
+ os.setpgid(0, 0)
+ preexec_fn = setpgidfn
+
+ try:
+ subprocess.Popen.__init__(self, args, bufsize, executable,
+ stdin, stdout, stderr,
+ preexec_fn, close_fds,
+ shell, cwd, env,
+ universal_newlines, startupinfo, creationflags)
+ except OSError:
+ print >> sys.stderr, args
+ raise
+
+ def debug(self, msg):
+ if not MOZPROCESS_DEBUG:
+ return
+ thread = threading.current_thread().name
+ print("DBG::MOZPROC PID:{} ({}) | {}".format(self.pid, thread, msg))
+
+ def __del__(self, _maxint=sys.maxint):
+ if isWin:
+ handle = getattr(self, '_handle', None)
+ if handle:
+ if hasattr(self, '_internal_poll'):
+ self._internal_poll(_deadstate=_maxint)
+ else:
+ self.poll(_deadstate=sys.maxint)
+ if handle or self._job or self._io_port:
+ self._cleanup()
+ else:
+ subprocess.Popen.__del__(self)
+
+ def kill(self, sig=None):
+ if isWin:
+ if not self._ignore_children and self._handle and self._job:
+ self.debug("calling TerminateJobObject")
+ winprocess.TerminateJobObject(self._job, winprocess.ERROR_CONTROL_C_EXIT)
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ elif self._handle:
+ self.debug("calling TerminateProcess")
+ try:
+ winprocess.TerminateProcess(self._handle, winprocess.ERROR_CONTROL_C_EXIT)
+ except:
+ traceback.print_exc()
+ raise OSError("Could not terminate process")
+ finally:
+ winprocess.GetExitCodeProcess(self._handle)
+ self._cleanup()
+ else:
+ def send_sig(sig):
+ pid = self.detached_pid or self.pid
+ if not self._ignore_children:
+ try:
+ os.killpg(pid, sig)
+ except BaseException as e:
+ # Error 3 is a "no such process" failure, which is fine because the
+ # application might already have been terminated itself. Any other
+ # error would indicate a problem in killing the process.
+ if getattr(e, "errno", None) != 3:
+ print >> sys.stderr, "Could not terminate process: %s" % self.pid
+ raise
+ else:
+ os.kill(pid, sig)
+
+ if sig is None and isPosix:
+ # ask the process for termination and wait a bit
+ send_sig(signal.SIGTERM)
+ limit = time.time() + self.TIMEOUT_BEFORE_SIGKILL
+ while time.time() <= limit:
+ if self.poll() is not None:
+ # process terminated nicely
+ break
+ time.sleep(0.02)
+ else:
+ # process did not terminate - send SIGKILL to force
+ send_sig(signal.SIGKILL)
+ else:
+ # a signal was explicitly set or not posix
+ send_sig(sig or signal.SIGKILL)
+
+ self.returncode = self.wait()
+ self._cleanup()
+ return self.returncode
+
+ def poll(self):
+ """ Popen.poll
+ Check if child process has terminated. Set and return returncode attribute.
+ """
+ # If we have a handle, the process is alive
+ if isWin and getattr(self, '_handle', None):
+ return None
+
+ return subprocess.Popen.poll(self)
+
+ def wait(self):
+ """ Popen.wait
+ Called to wait for a running process to shut down and return
+ its exit code
+ Returns the main process's exit code
+ """
+ # This call will be different for each OS
+ self.returncode = self._wait()
+ self._cleanup()
+ return self.returncode
+
+ """ Private Members of Process class """
+
+ if isWin:
+ # Redefine the execute child so that we can track process groups
+ def _execute_child(self, *args_tuple):
+ # workaround for bug 950894
+ if sys.hexversion < 0x02070600: # prior to 2.7.6
+ (args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines, startupinfo,
+ creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite) = args_tuple
+ to_close = set()
+ else: # 2.7.6 and later
+ (args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines, startupinfo,
+ creationflags, shell, to_close,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite) = args_tuple
+ if not isinstance(args, basestring):
+ args = subprocess.list2cmdline(args)
+
+ # Always or in the create new process group
+ creationflags |= winprocess.CREATE_NEW_PROCESS_GROUP
+
+ if startupinfo is None:
+ startupinfo = winprocess.STARTUPINFO()
+
+ if None not in (p2cread, c2pwrite, errwrite):
+ startupinfo.dwFlags |= winprocess.STARTF_USESTDHANDLES
+ startupinfo.hStdInput = int(p2cread)
+ startupinfo.hStdOutput = int(c2pwrite)
+ startupinfo.hStdError = int(errwrite)
+ if shell:
+ startupinfo.dwFlags |= winprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = winprocess.SW_HIDE
+ comspec = os.environ.get("COMSPEC", "cmd.exe")
+ args = comspec + " /c " + args
+
+ # Determine if we can create a job or create nested jobs.
+ can_create_job = winprocess.CanCreateJobObject()
+ can_nest_jobs = self._can_nest_jobs()
+
+ # Ensure we write a warning message if we are falling back
+ if not (can_create_job or can_nest_jobs) and not self._ignore_children:
+ # We can't create job objects AND the user wanted us to
+ # Warn the user about this.
+ print >> sys.stderr, \
+ "ProcessManager UNABLE to use job objects to manage child processes"
+
+ # set process creation flags
+ creationflags |= winprocess.CREATE_SUSPENDED
+ creationflags |= winprocess.CREATE_UNICODE_ENVIRONMENT
+ if can_create_job:
+ creationflags |= winprocess.CREATE_BREAKAWAY_FROM_JOB
+ if not (can_create_job or can_nest_jobs):
+ # Since we've warned, we just log info here to inform you
+ # of the consequence of setting ignore_children = True
+ print "ProcessManager NOT managing child processes"
+
+ # create the process
+ hp, ht, pid, tid = winprocess.CreateProcess(
+ executable, args,
+ None, None, # No special security
+ 1, # Must inherit handles!
+ creationflags,
+ winprocess.EnvironmentBlock(env),
+ cwd, startupinfo)
+ self._child_created = True
+ self._handle = hp
+ self._thread = ht
+ self.pid = pid
+ self.tid = tid
+
+ if not self._ignore_children and (can_create_job or can_nest_jobs):
+ try:
+ # We create a new job for this process, so that we can kill
+ # the process and any sub-processes
+ # Create the IO Completion Port
+ self._io_port = winprocess.CreateIoCompletionPort()
+ self._job = winprocess.CreateJobObject()
+
+ # Now associate the io comp port and the job object
+ joacp = JOBOBJECT_ASSOCIATE_COMPLETION_PORT(winprocess.COMPKEY_JOBOBJECT,
+ self._io_port)
+ winprocess.SetInformationJobObject(
+ self._job,
+ JobObjectAssociateCompletionPortInformation,
+ addressof(joacp),
+ sizeof(joacp)
+ )
+
+ # Allow subprocesses to break away from us - necessary for
+ # flash with protected mode
+ jbli = JOBOBJECT_BASIC_LIMIT_INFORMATION(
+ c_longlong(0), # per process time limit (ignored)
+ c_longlong(0), # per job user time limit (ignored)
+ winprocess.JOB_OBJECT_LIMIT_BREAKAWAY_OK,
+ 0, # min working set (ignored)
+ 0, # max working set (ignored)
+ 0, # active process limit (ignored)
+ None, # affinity (ignored)
+ 0, # Priority class (ignored)
+ 0, # Scheduling class (ignored)
+ )
+
+ iocntr = IO_COUNTERS()
+ jeli = JOBOBJECT_EXTENDED_LIMIT_INFORMATION(
+ jbli, # basic limit info struct
+ iocntr, # io_counters (ignored)
+ 0, # process mem limit (ignored)
+ 0, # job mem limit (ignored)
+ 0, # peak process limit (ignored)
+ 0) # peak job limit (ignored)
+
+ winprocess.SetInformationJobObject(self._job,
+ JobObjectExtendedLimitInformation,
+ addressof(jeli),
+ sizeof(jeli)
+ )
+
+ # Assign the job object to the process
+ winprocess.AssignProcessToJobObject(self._job, int(hp))
+
+ # It's overkill, but we use Queue to signal between threads
+ # because it handles errors more gracefully than event or condition.
+ self._process_events = Queue()
+
+ # Spin up our thread for managing the IO Completion Port
+ self._procmgrthread = threading.Thread(target=self._procmgr)
+ except:
+ print >> sys.stderr, """Exception trying to use job objects;
+falling back to not using job objects for managing child processes"""
+ tb = traceback.format_exc()
+ print >> sys.stderr, tb
+ # Ensure no dangling handles left behind
+ self._cleanup_job_io_port()
+ else:
+ self._job = None
+
+ winprocess.ResumeThread(int(ht))
+ if getattr(self, '_procmgrthread', None):
+ self._procmgrthread.start()
+ ht.Close()
+
+ for i in (p2cread, c2pwrite, errwrite):
+ if i is not None:
+ i.Close()
+
+ # Per:
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/hh448388%28v=vs.85%29.aspx
+ # Nesting jobs came in with windows versions starting with 6.2 according to the table
+ # on this page:
+ # https://msdn.microsoft.com/en-us/library/ms724834%28v=vs.85%29.aspx
+ def _can_nest_jobs(self):
+ winver = sys.getwindowsversion()
+ return (winver.major > 6 or
+ winver.major == 6 and winver.minor >= 2)
+
+ # Windows Process Manager - watches the IO Completion Port and
+ # keeps track of child processes
+ def _procmgr(self):
+ if not (self._io_port) or not (self._job):
+ return
+
+ try:
+ self._poll_iocompletion_port()
+ except KeyboardInterrupt:
+ raise KeyboardInterrupt
+
+ def _poll_iocompletion_port(self):
+ # Watch the IO Completion port for status
+ self._spawned_procs = {}
+ countdowntokill = 0
+
+ self.debug("start polling IO completion port")
+
+ while True:
+ msgid = c_ulong(0)
+ compkey = c_ulong(0)
+ pid = c_ulong(0)
+ portstatus = winprocess.GetQueuedCompletionStatus(self._io_port,
+ byref(msgid),
+ byref(compkey),
+ byref(pid),
+ 5000)
+
+ # If the countdowntokill has been activated, we need to check
+ # if we should start killing the children or not.
+ if countdowntokill != 0:
+ diff = datetime.now() - countdowntokill
+ # Arbitrarily wait 3 minutes for windows to get its act together
+ # Windows sometimes takes a small nap between notifying the
+ # IO Completion port and actually killing the children, and we
+ # don't want to mistake that situation for the situation of an unexpected
+ # parent abort (which is what we're looking for here).
+ if diff.seconds > self.MAX_IOCOMPLETION_PORT_NOTIFICATION_DELAY:
+ print >> sys.stderr, \
+ "WARNING | IO Completion Port failed to signal process shutdown"
+ print >> sys.stderr, \
+ "Parent process %s exited with children alive:" % self.pid
+ print >> sys.stderr, \
+ "PIDS: %s" % ', '.join([str(i) for i in self._spawned_procs])
+ print >> sys.stderr, \
+ "Attempting to kill them, but no guarantee of success"
+
+ self.kill()
+ self._process_events.put({self.pid: 'FINISHED'})
+ break
+
+ if not portstatus:
+ # Check to see what happened
+ errcode = winprocess.GetLastError()
+ if errcode == winprocess.ERROR_ABANDONED_WAIT_0:
+ # Then something has killed the port, break the loop
+ print >> sys.stderr, "IO Completion Port unexpectedly closed"
+ self._process_events.put({self.pid: 'FINISHED'})
+ break
+ elif errcode == winprocess.WAIT_TIMEOUT:
+ # Timeouts are expected, just keep on polling
+ continue
+ else:
+ print >> sys.stderr, \
+ "Error Code %s trying to query IO Completion Port, " \
+ "exiting" % errcode
+ raise WinError(errcode)
+ break
+
+ if compkey.value == winprocess.COMPKEY_TERMINATE.value:
+ self.debug("compkeyterminate detected")
+ # Then we're done
+ break
+
+ # Check the status of the IO Port and do things based on it
+ if compkey.value == winprocess.COMPKEY_JOBOBJECT.value:
+ if msgid.value == winprocess.JOB_OBJECT_MSG_ACTIVE_PROCESS_ZERO:
+ # No processes left, time to shut down
+ # Signal anyone waiting on us that it is safe to shut down
+ self.debug("job object msg active processes zero")
+ self._process_events.put({self.pid: 'FINISHED'})
+ break
+ elif msgid.value == winprocess.JOB_OBJECT_MSG_NEW_PROCESS:
+ # New Process started
+ # Add the child proc to our list in case our parent flakes out on us
+ # without killing everything.
+ if pid.value != self.pid:
+ self._spawned_procs[pid.value] = 1
+ self.debug("new process detected with pid value: %s" % pid.value)
+ elif msgid.value == winprocess.JOB_OBJECT_MSG_EXIT_PROCESS:
+ self.debug("process id %s exited normally" % pid.value)
+ # One process exited normally
+ if pid.value == self.pid and len(self._spawned_procs) > 0:
+ # Parent process dying, start countdown timer
+ countdowntokill = datetime.now()
+ elif pid.value in self._spawned_procs:
+ # Child Process died remove from list
+ del(self._spawned_procs[pid.value])
+ elif msgid.value == winprocess.JOB_OBJECT_MSG_ABNORMAL_EXIT_PROCESS:
+ # One process exited abnormally
+ self.debug("process id %s exited abnormally" % pid.value)
+ if pid.value == self.pid and len(self._spawned_procs) > 0:
+ # Parent process dying, start countdown timer
+ countdowntokill = datetime.now()
+ elif pid.value in self._spawned_procs:
+ # Child Process died remove from list
+ del self._spawned_procs[pid.value]
+ else:
+ # We don't care about anything else
+ self.debug("We got a message %s" % msgid.value)
+ pass
+
+ def _wait(self):
+ # First, check to see if the process is still running
+ if self._handle:
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ else:
+ # Dude, the process is like totally dead!
+ return self.returncode
+
+ threadalive = False
+ if hasattr(self, "_procmgrthread"):
+ threadalive = self._procmgrthread.is_alive()
+ if self._job and threadalive and threading.current_thread() != self._procmgrthread:
+ self.debug("waiting with IO completion port")
+ # Then we are managing with IO Completion Ports
+ # wait on a signal so we know when we have seen the last
+ # process come through.
+ # We use queues to synchronize between the thread and this
+ # function because events just didn't have robust enough error
+ # handling on pre-2.7 versions
+ try:
+ # timeout is the max amount of time the procmgr thread will wait for
+ # child processes to shutdown before killing them with extreme prejudice.
+ item = self._process_events.get(
+ timeout=self.MAX_IOCOMPLETION_PORT_NOTIFICATION_DELAY +
+ self.MAX_PROCESS_KILL_DELAY)
+ if item[self.pid] == 'FINISHED':
+ self.debug("received 'FINISHED' from _procmgrthread")
+ self._process_events.task_done()
+ except:
+ traceback.print_exc()
+ raise OSError("IO Completion Port failed to signal process shutdown")
+ finally:
+ if self._handle:
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ self._cleanup()
+
+ else:
+ # Not managing with job objects, so all we can reasonably do
+ # is call waitforsingleobject and hope for the best
+ self.debug("waiting without IO completion port")
+
+ if not self._ignore_children:
+ self.debug("NOT USING JOB OBJECTS!!!")
+ # First, make sure we have not already ended
+ if self.returncode != winprocess.STILL_ACTIVE:
+ self._cleanup()
+ return self.returncode
+
+ rc = None
+ if self._handle:
+ rc = winprocess.WaitForSingleObject(self._handle, -1)
+
+ if rc == winprocess.WAIT_TIMEOUT:
+ # The process isn't dead, so kill it
+ print "Timed out waiting for process to close, attempting TerminateProcess"
+ self.kill()
+ elif rc == winprocess.WAIT_OBJECT_0:
+ # We caught WAIT_OBJECT_0, which indicates all is well
+ print "Single process terminated successfully"
+ self.returncode = winprocess.GetExitCodeProcess(self._handle)
+ else:
+ # An error occurred, so we should probably raise
+ rc = winprocess.GetLastError()
+ if rc:
+ raise WinError(rc)
+
+ self._cleanup()
+
+ return self.returncode
+
+ def _cleanup_job_io_port(self):
+ """ Do the job and IO port cleanup separately because there are
+ cases where we want to clean these without killing _handle
+ (i.e. if we fail to create the job object in the first place)
+ """
+ if getattr(self, '_job', None) and self._job != winprocess.INVALID_HANDLE_VALUE:
+ self._job.Close()
+ self._job = None
+ else:
+ # If windows already freed our handle just set it to none
+ # (saw this intermittently while testing)
+ self._job = None
+
+ if getattr(self, '_io_port', None) and \
+ self._io_port != winprocess.INVALID_HANDLE_VALUE:
+ self._io_port.Close()
+ self._io_port = None
+ else:
+ self._io_port = None
+
+ if getattr(self, '_procmgrthread', None):
+ self._procmgrthread = None
+
+ def _cleanup(self):
+ self._cleanup_job_io_port()
+ if self._thread and self._thread != winprocess.INVALID_HANDLE_VALUE:
+ self._thread.Close()
+ self._thread = None
+ else:
+ self._thread = None
+
+ if self._handle and self._handle != winprocess.INVALID_HANDLE_VALUE:
+ self._handle.Close()
+ self._handle = None
+ else:
+ self._handle = None
+
+ elif isPosix:
+
+ def _wait(self):
+ """ Haven't found any reason to differentiate between these platforms
+ so they all use the same wait callback. If it is necessary to
+ craft different styles of wait, then a new _wait method
+ could be easily implemented.
+ """
+
+ if not self._ignore_children:
+ try:
+ # os.waitpid return value:
+ # > [...] a tuple containing its pid and exit status
+ # > indication: a 16-bit number, whose low byte is the
+ # > signal number that killed the process, and whose
+ # > high byte is the exit status (if the signal number
+ # > is zero)
+ # - http://docs.python.org/2/library/os.html#os.wait
+ status = os.waitpid(self.pid, 0)[1]
+
+ # For consistency, format status the same as subprocess'
+ # returncode attribute
+ if status > 255:
+ return status >> 8
+ return -status
+ except OSError as e:
+ if getattr(e, "errno", None) != 10:
+ # Error 10 is "no child process", which could indicate normal
+ # close
+ print >> sys.stderr, \
+ "Encountered error waiting for pid to close: %s" % e
+ raise
+
+ return self.returncode
+
+ else:
+ # For non-group wait, call base class
+ subprocess.Popen.wait(self)
+ return self.returncode
+
+ def _cleanup(self):
+ pass
+
+ else:
+ # An unrecognized platform, we will call the base class for everything
+ print >> sys.stderr, \
+ "Unrecognized platform, process groups may not be managed properly"
+
+ def _wait(self):
+ self.returncode = subprocess.Popen.wait(self)
+ return self.returncode
+
+ def _cleanup(self):
+ pass
+
+ def __init__(self,
+ cmd,
+ args=None,
+ cwd=None,
+ env=None,
+ ignore_children=False,
+ kill_on_timeout=True,
+ processOutputLine=(),
+ processStderrLine=(),
+ onTimeout=(),
+ onFinish=(),
+ **kwargs):
+ self.cmd = cmd
+ self.args = args
+ self.cwd = cwd
+ self.didTimeout = False
+ self._ignore_children = ignore_children
+ self.keywordargs = kwargs
+ self.read_buffer = ''
+
+ if env is None:
+ env = os.environ.copy()
+ self.env = env
+
+ # handlers
+ def to_callable_list(arg):
+ if callable(arg):
+ arg = [arg]
+ return CallableList(arg)
+
+ processOutputLine = to_callable_list(processOutputLine)
+ processStderrLine = to_callable_list(processStderrLine)
+ onTimeout = to_callable_list(onTimeout)
+ onFinish = to_callable_list(onFinish)
+
+ def on_timeout():
+ self.didTimeout = True
+ if kill_on_timeout:
+ self.kill()
+ onTimeout.insert(0, on_timeout)
+
+ self._stderr = subprocess.STDOUT
+ if processStderrLine:
+ self._stderr = subprocess.PIPE
+ self.reader = ProcessReader(stdout_callback=processOutputLine,
+ stderr_callback=processStderrLine,
+ finished_callback=onFinish,
+ timeout_callback=onTimeout)
+
+ # It is common for people to pass in the entire array with the cmd and
+ # the args together since this is how Popen uses it. Allow for that.
+ if isinstance(self.cmd, list):
+ if self.args is not None:
+ raise TypeError("cmd and args must not both be lists")
+ (self.cmd, self.args) = (self.cmd[0], self.cmd[1:])
+ elif self.args is None:
+ self.args = []
+
+ @property
+ def timedOut(self):
+ """True if the process has timed out."""
+ return self.didTimeout
+
+ @property
+ def commandline(self):
+ """the string value of the command line (command + args)"""
+ return subprocess.list2cmdline([self.cmd] + self.args)
+
+ def run(self, timeout=None, outputTimeout=None):
+ """
+ Starts the process.
+
+ If timeout is not None, the process will be allowed to continue for
+ that number of seconds before being killed. If the process is killed
+ due to a timeout, the onTimeout handler will be called.
+
+ If outputTimeout is not None, the process will be allowed to continue
+ for that number of seconds without producing any output before
+ being killed.
+ """
+ self.didTimeout = False
+
+ # default arguments
+ args = dict(stdout=subprocess.PIPE,
+ stderr=self._stderr,
+ cwd=self.cwd,
+ env=self.env,
+ ignore_children=self._ignore_children)
+
+ # build process arguments
+ args.update(self.keywordargs)
+
+ # launch the process
+ self.proc = self.Process([self.cmd] + self.args, **args)
+
+ if isPosix:
+ # Keep track of the initial process group in case the process detaches itself
+ self.proc.pgid = os.getpgid(self.proc.pid)
+ self.proc.detached_pid = None
+
+ self.processOutput(timeout=timeout, outputTimeout=outputTimeout)
+
+ def kill(self, sig=None):
+ """
+ Kills the managed process.
+
+ If you created the process with 'ignore_children=False' (the
+ default) then it will also kill all child processes spawned by
+ it. If you specified 'ignore_children=True' when creating the
+ process, only the root process will be killed.
+
+ Note that this does not manage any state or save any output; it
+ immediately kills the process.
+
+ :param sig: Signal used to kill the process, defaults to SIGKILL
+ (has no effect on Windows)
+ """
+ if not hasattr(self, 'proc'):
+ raise RuntimeError("Calling kill() on a non started process is not"
+ " allowed.")
+ self.proc.kill(sig=sig)
+
+ # When we kill the managed process we also have to wait for the
+ # reader thread to be finished. Otherwise consumers would have to assume
+ # that it still has not completely shut down.
+ return self.wait()
+
+ def poll(self):
+ """Check if child process has terminated
+
+ Returns the current returncode value:
+ - None if the process hasn't terminated yet
+ - A negative number if the process was killed by signal N (Unix only)
+ - '0' if the process ended without failures
+
+ """
+ # Ensure that we first check for the reader status. Otherwise
+ # we might mark the process as finished while output is still getting
+ # processed.
+ if not hasattr(self, 'proc'):
+ raise RuntimeError("Calling poll() on a non started process is not"
+ " allowed.")
+ elif self.reader.is_alive():
+ return None
+ elif hasattr(self.proc, "returncode"):
+ return self.proc.returncode
+ else:
+ return self.proc.poll()
+
+ def processOutput(self, timeout=None, outputTimeout=None):
+ """
+ Handle process output until the process terminates or times out.
+
+ If timeout is not None, the process will be allowed to continue for
+ that number of seconds before being killed.
+
+ If outputTimeout is not None, the process will be allowed to continue
+ for that number of seconds without producing any output before
+ being killed.
+ """
+ # this method is kept for backward compatibility
+ if not hasattr(self, 'proc'):
+ self.run(timeout=timeout, outputTimeout=outputTimeout)
+ # self.run will call this again
+ return
+ if not self.reader.is_alive():
+ self.reader.timeout = timeout
+ self.reader.output_timeout = outputTimeout
+ self.reader.start(self.proc)
+
+ def wait(self, timeout=None):
+ """
+ Waits until all output has been read and the process is
+ terminated.
+
+ If timeout is not None, will return after timeout seconds.
+ This timeout only causes the wait function to return and
+ does not kill the process.
+
+ Returns the process exit code value:
+ - None if the process hasn't terminated yet
+ - A negative number if the process was killed by signal N (Unix only)
+ - '0' if the process ended without failures
+
+ """
+ if self.reader.thread and self.reader.thread is not threading.current_thread():
+ # Thread.join() blocks the main thread until the reader thread is finished
+ # wake up once a second in case a keyboard interrupt is sent
+ count = 0
+ while self.reader.is_alive():
+ self.reader.thread.join(timeout=1)
+ count += 1
+ if timeout and count > timeout:
+ return None
+
+ self.returncode = self.proc.wait()
+ return self.returncode
+
+ # TODO Remove this method when consumers have been fixed
+ def waitForFinish(self, timeout=None):
+ print >> sys.stderr, "MOZPROCESS WARNING: ProcessHandler.waitForFinish() is deprecated, " \
+ "use ProcessHandler.wait() instead"
+ return self.wait(timeout=timeout)
+
+ @property
+ def pid(self):
+ return self.proc.pid
+
+ def check_for_detached(self, new_pid):
+ """Check if the current process has been detached and mark it appropriately.
+
+ In case of application restarts the process can spawn itself into a new process group.
+ From then on the process can no longer be tracked by mozprocess and has to be
+ marked as detached. If the consumer of mozprocess still knows the new process id it could
+ check for the detached state.
+
+ new_pid is the new process id of the child process.
+ """
+ if not self.proc:
+ return
+
+ if isPosix:
+ new_pgid = None
+ try:
+ new_pgid = os.getpgid(new_pid)
+ except OSError as e:
+ # Do not consume errors except "No such process"
+ if e.errno != 3:
+ raise
+
+ if new_pgid and new_pgid != self.proc.pgid:
+ self.proc.detached_pid = new_pid
+ print >> sys.stdout, \
+ 'Child process with id "%s" has been marked as detached because it is no ' \
+ 'longer in the managed process group. Keeping reference to the process id ' \
+ '"%s" which is the new child process.' % (self.pid, new_pid)
+
+
+class CallableList(list):
+
+ def __call__(self, *args, **kwargs):
+ for e in self:
+ e(*args, **kwargs)
+
+ def __add__(self, lst):
+ return CallableList(list.__add__(self, lst))
+
+
+class ProcessReader(object):
+
+ def __init__(self, stdout_callback=None, stderr_callback=None,
+ finished_callback=None, timeout_callback=None,
+ timeout=None, output_timeout=None):
+ self.stdout_callback = stdout_callback or (lambda line: True)
+ self.stderr_callback = stderr_callback or (lambda line: True)
+ self.finished_callback = finished_callback or (lambda: True)
+ self.timeout_callback = timeout_callback or (lambda: True)
+ self.timeout = timeout
+ self.output_timeout = output_timeout
+ self.thread = None
+
+ def _create_stream_reader(self, name, stream, queue, callback):
+ thread = threading.Thread(name=name,
+ target=self._read_stream,
+ args=(stream, queue, callback))
+ thread.daemon = True
+ thread.start()
+ return thread
+
+ def _read_stream(self, stream, queue, callback):
+ while True:
+ line = stream.readline()
+ if not line:
+ break
+ queue.put((line, callback))
+ stream.close()
+
+ def start(self, proc):
+ queue = Queue()
+ stdout_reader = None
+ if proc.stdout:
+ stdout_reader = self._create_stream_reader('ProcessReaderStdout',
+ proc.stdout,
+ queue,
+ self.stdout_callback)
+ stderr_reader = None
+ if proc.stderr and proc.stderr != proc.stdout:
+ stderr_reader = self._create_stream_reader('ProcessReaderStderr',
+ proc.stderr,
+ queue,
+ self.stderr_callback)
+ self.thread = threading.Thread(name='ProcessReader',
+ target=self._read,
+ args=(stdout_reader,
+ stderr_reader,
+ queue))
+ self.thread.daemon = True
+ self.thread.start()
+
+ def _read(self, stdout_reader, stderr_reader, queue):
+ start_time = time.time()
+ timed_out = False
+ timeout = self.timeout
+ if timeout is not None:
+ timeout += start_time
+ output_timeout = self.output_timeout
+ if output_timeout is not None:
+ output_timeout += start_time
+
+ while (stdout_reader and stdout_reader.is_alive()) \
+ or (stderr_reader and stderr_reader.is_alive()):
+ has_line = True
+ try:
+ line, callback = queue.get(True, 0.02)
+ except Empty:
+ has_line = False
+ now = time.time()
+ if not has_line:
+ if output_timeout is not None and now > output_timeout:
+ timed_out = True
+ break
+ else:
+ if output_timeout is not None:
+ output_timeout = now + self.output_timeout
+ callback(line.rstrip())
+ if timeout is not None and now > timeout:
+ timed_out = True
+ break
+ # process remaining lines to read
+ while not queue.empty():
+ line, callback = queue.get(False)
+ callback(line.rstrip())
+ if timed_out:
+ self.timeout_callback()
+ if stdout_reader:
+ stdout_reader.join()
+ if stderr_reader:
+ stderr_reader.join()
+ if not timed_out:
+ self.finished_callback()
+
+ def is_alive(self):
+ if self.thread:
+ return self.thread.is_alive()
+ return False
+
+# default output handlers
+# these should be callables that take the output line
+
+
+class StoreOutput(object):
+ """accumulate stdout"""
+
+ def __init__(self):
+ self.output = []
+
+ def __call__(self, line):
+ self.output.append(line)
+
+
+class StreamOutput(object):
+ """pass output to a stream and flush"""
+
+ def __init__(self, stream):
+ self.stream = stream
+
+ def __call__(self, line):
+ try:
+ self.stream.write(line + '\n')
+ except UnicodeDecodeError:
+ # TODO: Workaround for bug #991866 to make sure we can still display
+ # output when normal UTF-8 display is failing
+ self.stream.write(line.decode('iso8859-1') + '\n')
+ self.stream.flush()
+
+
+class LogOutput(StreamOutput):
+ """pass output to a file"""
+
+ def __init__(self, filename):
+ self.file_obj = open(filename, 'a')
+ StreamOutput.__init__(self, self.file_obj)
+
+ def __del__(self):
+ if self.file_obj is not None:
+ self.file_obj.close()
+
+
+# front end class with the default handlers
+
+
+class ProcessHandler(ProcessHandlerMixin):
+ """
+ Convenience class for handling processes with default output handlers.
+
+ By default, all output is sent to stdout. This can be disabled by setting
+ the *stream* argument to None.
+
+ If the processOutputLine keyword argument is specified, the function or
+ list of functions it names will be called for each line of output; in
+ that case the output will not be written to stdout automatically, even
+ if stream is True (the default).
+
+ If storeOutput==True, the output produced by the process will be saved
+ as self.output.
+
+ If logfile is not None, the output produced by the process will be
+ appended to the given file.
+ """
+
+ def __init__(self, cmd, logfile=None, stream=True, storeOutput=True,
+ **kwargs):
+ kwargs.setdefault('processOutputLine', [])
+ if callable(kwargs['processOutputLine']):
+ kwargs['processOutputLine'] = [kwargs['processOutputLine']]
+
+ if logfile:
+ logoutput = LogOutput(logfile)
+ kwargs['processOutputLine'].append(logoutput)
+
+ if stream is True:
+ # Print to standard output only if no outputline provided
+ if not kwargs['processOutputLine']:
+ kwargs['processOutputLine'].append(StreamOutput(sys.stdout))
+ elif stream:
+ streamoutput = StreamOutput(stream)
+ kwargs['processOutputLine'].append(streamoutput)
+
+ self.output = None
+ if storeOutput:
+ storeoutput = StoreOutput()
+ self.output = storeoutput.output
+ kwargs['processOutputLine'].append(storeoutput)
+
+ ProcessHandlerMixin.__init__(self, cmd, **kwargs)
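
ProcessHandler above layers the default output handlers (StreamOutput, StoreOutput, LogOutput) on top of ProcessHandlerMixin, so a typical caller only constructs it, runs it, and waits. Below is a minimal sketch of that flow; the command and the timeout are arbitrary example values, not taken from this patch::

    # Sketch: run a command, echo and store its output, enforce a timeout.
    # The command and the 30 second timeout are arbitrary example values.
    from mozprocess import ProcessHandler

    def note_timeout():
        print("command timed out")

    proc = ProcessHandler(["python", "-c", "print('hello from mozprocess')"],
                          onTimeout=[note_timeout])
    proc.run(timeout=30)   # kill_on_timeout defaults to True
    exit_code = proc.wait()

    print("exit code: %s" % exit_code)
    print("stored output: %s" % proc.output)   # storeOutput=True by default
    print("timed out: %s" % proc.timedOut)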
diff --git a/testing/mozbase/mozprocess/mozprocess/qijo.py b/testing/mozbase/mozprocess/mozprocess/qijo.py
new file mode 100644
index 000000000..ce23909fa
--- /dev/null
+++ b/testing/mozbase/mozprocess/mozprocess/qijo.py
@@ -0,0 +1,166 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+from ctypes import (
+ c_void_p,
+ POINTER,
+ sizeof,
+ Structure,
+ windll,
+ WinError,
+ WINFUNCTYPE,
+ addressof,
+ c_size_t,
+ c_ulong
+)
+
+from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LARGE_INTEGER
+
+LPVOID = c_void_p
+LPDWORD = POINTER(DWORD)
+SIZE_T = c_size_t
+ULONG_PTR = POINTER(c_ulong)
+
+# A ULONGLONG is a 64-bit unsigned integer.
+# Thus there are 8 bytes in a ULONGLONG.
+# XXX why not import c_ulonglong ?
+ULONGLONG = BYTE * 8
+
+
+class IO_COUNTERS(Structure):
+ # The IO_COUNTERS struct is 6 ULONGLONGs.
+ # TODO: Replace with non-dummy fields.
+ _fields_ = [('dummy', ULONGLONG * 6)]
+
+
+class JOBOBJECT_BASIC_ACCOUNTING_INFORMATION(Structure):
+ _fields_ = [('TotalUserTime', LARGE_INTEGER),
+ ('TotalKernelTime', LARGE_INTEGER),
+ ('ThisPeriodTotalUserTime', LARGE_INTEGER),
+ ('ThisPeriodTotalKernelTime', LARGE_INTEGER),
+ ('TotalPageFaultCount', DWORD),
+ ('TotalProcesses', DWORD),
+ ('ActiveProcesses', DWORD),
+ ('TotalTerminatedProcesses', DWORD)]
+
+
+class JOBOBJECT_BASIC_AND_IO_ACCOUNTING_INFORMATION(Structure):
+ _fields_ = [('BasicInfo', JOBOBJECT_BASIC_ACCOUNTING_INFORMATION),
+ ('IoInfo', IO_COUNTERS)]
+
+
+# see http://msdn.microsoft.com/en-us/library/ms684147%28VS.85%29.aspx
+class JOBOBJECT_BASIC_LIMIT_INFORMATION(Structure):
+ _fields_ = [('PerProcessUserTimeLimit', LARGE_INTEGER),
+ ('PerJobUserTimeLimit', LARGE_INTEGER),
+ ('LimitFlags', DWORD),
+ ('MinimumWorkingSetSize', SIZE_T),
+ ('MaximumWorkingSetSize', SIZE_T),
+ ('ActiveProcessLimit', DWORD),
+ ('Affinity', ULONG_PTR),
+ ('PriorityClass', DWORD),
+ ('SchedulingClass', DWORD)
+ ]
+
+
+class JOBOBJECT_ASSOCIATE_COMPLETION_PORT(Structure):
+ _fields_ = [('CompletionKey', c_ulong),
+ ('CompletionPort', HANDLE)]
+
+
+# see http://msdn.microsoft.com/en-us/library/ms684156%28VS.85%29.aspx
+class JOBOBJECT_EXTENDED_LIMIT_INFORMATION(Structure):
+ _fields_ = [('BasicLimitInformation', JOBOBJECT_BASIC_LIMIT_INFORMATION),
+ ('IoInfo', IO_COUNTERS),
+ ('ProcessMemoryLimit', SIZE_T),
+ ('JobMemoryLimit', SIZE_T),
+ ('PeakProcessMemoryUsed', SIZE_T),
+ ('PeakJobMemoryUsed', SIZE_T)]
+
+# These numbers below come from:
+# http://msdn.microsoft.com/en-us/library/ms686216%28v=vs.85%29.aspx
+JobObjectAssociateCompletionPortInformation = 7
+JobObjectBasicAndIoAccountingInformation = 8
+JobObjectExtendedLimitInformation = 9
+
+
+class JobObjectInfo(object):
+ mapping = {'JobObjectBasicAndIoAccountingInformation': 8,
+ 'JobObjectExtendedLimitInformation': 9,
+ 'JobObjectAssociateCompletionPortInformation': 7}
+ structures = {
+ 7: JOBOBJECT_ASSOCIATE_COMPLETION_PORT,
+ 8: JOBOBJECT_BASIC_AND_IO_ACCOUNTING_INFORMATION,
+ 9: JOBOBJECT_EXTENDED_LIMIT_INFORMATION
+ }
+
+ def __init__(self, _class):
+ if isinstance(_class, basestring):
+ assert _class in self.mapping, \
+ 'Class should be one of %s; you gave %s' % (self.mapping, _class)
+ _class = self.mapping[_class]
+ assert _class in self.structures, \
+ 'Class should be one of %s; you gave %s' % (self.structures, _class)
+ self.code = _class
+ self.info = self.structures[_class]()
+
+
+QueryInformationJobObjectProto = WINFUNCTYPE(
+ BOOL, # Return type
+ HANDLE, # hJob
+ DWORD, # JobObjectInfoClass
+ LPVOID, # lpJobObjectInfo
+ DWORD, # cbJobObjectInfoLength
+ LPDWORD # lpReturnLength
+)
+
+QueryInformationJobObjectFlags = (
+ (1, 'hJob'),
+ (1, 'JobObjectInfoClass'),
+ (1, 'lpJobObjectInfo'),
+ (1, 'cbJobObjectInfoLength'),
+ (1, 'lpReturnLength', None)
+)
+
+_QueryInformationJobObject = QueryInformationJobObjectProto(
+ ('QueryInformationJobObject', windll.kernel32),
+ QueryInformationJobObjectFlags
+)
+
+
+class SubscriptableReadOnlyStruct(object):
+
+ def __init__(self, struct):
+ self._struct = struct
+
+ def _delegate(self, name):
+ result = getattr(self._struct, name)
+ if isinstance(result, Structure):
+ return SubscriptableReadOnlyStruct(result)
+ return result
+
+ def __getitem__(self, name):
+ match = [fname for fname, ftype in self._struct._fields_
+ if fname == name]
+ if match:
+ return self._delegate(name)
+ raise KeyError(name)
+
+ def __getattr__(self, name):
+ return self._delegate(name)
+
+
+def QueryInformationJobObject(hJob, JobObjectInfoClass):
+ jobinfo = JobObjectInfo(JobObjectInfoClass)
+ result = _QueryInformationJobObject(
+ hJob=hJob,
+ JobObjectInfoClass=jobinfo.code,
+ lpJobObjectInfo=addressof(jobinfo.info),
+ cbJobObjectInfoLength=sizeof(jobinfo.info)
+ )
+ if not result:
+ raise WinError()
+ return SubscriptableReadOnlyStruct(jobinfo.info)
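
QueryInformationJobObject wraps the kernel32 call and hands back a SubscriptableReadOnlyStruct, so fields can be read by attribute or by key. A Windows-only sketch follows, assuming a job object created with winprocess.CreateJobObject() the way processhandler.py does::

    # Windows-only sketch; relies on the winprocess/qijo modules from this
    # patch and on a job object the caller owns.
    from mozprocess import winprocess
    from mozprocess.qijo import QueryInformationJobObject

    job = winprocess.CreateJobObject()
    info = QueryInformationJobObject(
        job, 'JobObjectBasicAndIoAccountingInformation')

    # The wrapper allows both attribute and subscription access.
    print("active processes: %s" % info.BasicInfo.ActiveProcesses)
    print("total processes: %s" % info['BasicInfo']['TotalProcesses'])
    job.Close()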
diff --git a/testing/mozbase/mozprocess/mozprocess/winprocess.py b/testing/mozbase/mozprocess/mozprocess/winprocess.py
new file mode 100644
index 000000000..b748f8f30
--- /dev/null
+++ b/testing/mozbase/mozprocess/mozprocess/winprocess.py
@@ -0,0 +1,479 @@
+# A module to expose various thread/process/job related structures and
+# methods from kernel32
+#
+# The MIT License
+#
+# Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
+#
+# Additions and modifications written by Benjamin Smedberg
+# <benjamin@smedbergs.us> are Copyright (c) 2006 by the Mozilla Foundation
+# <http://www.mozilla.org/>
+#
+# More Modifications
+# Copyright (c) 2006-2007 by Mike Taylor <bear@code-bear.com>
+# Copyright (c) 2007-2008 by Mikeal Rogers <mikeal@mozilla.com>
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of the
+# author not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import absolute_import, unicode_literals
+
+import sys
+import subprocess
+
+from ctypes import c_void_p, POINTER, sizeof, Structure, windll, WinError, WINFUNCTYPE, c_ulong
+from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPWSTR, UINT, WORD
+from .qijo import QueryInformationJobObject
+
+LPVOID = c_void_p
+LPBYTE = POINTER(BYTE)
+LPDWORD = POINTER(DWORD)
+LPBOOL = POINTER(BOOL)
+LPULONG = POINTER(c_ulong)
+
+
+def ErrCheckBool(result, func, args):
+ """errcheck function for Windows functions that return a BOOL True
+ on success"""
+ if not result:
+ raise WinError()
+ return args
+
+
+# AutoHANDLE
+
+class AutoHANDLE(HANDLE):
+ """Subclass of HANDLE which will call CloseHandle() on deletion."""
+
+ CloseHandleProto = WINFUNCTYPE(BOOL, HANDLE)
+ CloseHandle = CloseHandleProto(("CloseHandle", windll.kernel32))
+ CloseHandle.errcheck = ErrCheckBool
+
+ def Close(self):
+ if self.value and self.value != HANDLE(-1).value:
+ self.CloseHandle(self)
+ self.value = 0
+
+ def __del__(self):
+ self.Close()
+
+ def __int__(self):
+ return self.value
+
+
+def ErrCheckHandle(result, func, args):
+ """errcheck function for Windows functions that return a HANDLE."""
+ if not result:
+ raise WinError()
+ return AutoHANDLE(result)
+
+# PROCESS_INFORMATION structure
+
+
+class PROCESS_INFORMATION(Structure):
+ _fields_ = [("hProcess", HANDLE),
+ ("hThread", HANDLE),
+ ("dwProcessID", DWORD),
+ ("dwThreadID", DWORD)]
+
+ def __init__(self):
+ Structure.__init__(self)
+
+ self.cb = sizeof(self)
+
+LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
+
+# STARTUPINFO structure
+
+
+class STARTUPINFO(Structure):
+ _fields_ = [("cb", DWORD),
+ ("lpReserved", LPWSTR),
+ ("lpDesktop", LPWSTR),
+ ("lpTitle", LPWSTR),
+ ("dwX", DWORD),
+ ("dwY", DWORD),
+ ("dwXSize", DWORD),
+ ("dwYSize", DWORD),
+ ("dwXCountChars", DWORD),
+ ("dwYCountChars", DWORD),
+ ("dwFillAttribute", DWORD),
+ ("dwFlags", DWORD),
+ ("wShowWindow", WORD),
+ ("cbReserved2", WORD),
+ ("lpReserved2", LPBYTE),
+ ("hStdInput", HANDLE),
+ ("hStdOutput", HANDLE),
+ ("hStdError", HANDLE)
+ ]
+LPSTARTUPINFO = POINTER(STARTUPINFO)
+
+SW_HIDE = 0
+
+STARTF_USESHOWWINDOW = 0x01
+STARTF_USESIZE = 0x02
+STARTF_USEPOSITION = 0x04
+STARTF_USECOUNTCHARS = 0x08
+STARTF_USEFILLATTRIBUTE = 0x10
+STARTF_RUNFULLSCREEN = 0x20
+STARTF_FORCEONFEEDBACK = 0x40
+STARTF_FORCEOFFFEEDBACK = 0x80
+STARTF_USESTDHANDLES = 0x100
+
+# EnvironmentBlock
+
+
+class EnvironmentBlock:
+ """An object which can be passed as the lpEnv parameter of CreateProcess.
+ It is initialized with a dictionary."""
+
+ def __init__(self, env):
+ if not env:
+ self._as_parameter_ = None
+ else:
+ values = []
+ fs_encoding = sys.getfilesystemencoding() or 'mbcs'
+ for k, v in env.iteritems():
+ if isinstance(k, bytes):
+ k = k.decode(fs_encoding, 'replace')
+ if isinstance(v, bytes):
+ v = v.decode(fs_encoding, 'replace')
+ values.append("{}={}".format(k, v))
+ values.append("")
+ self._as_parameter_ = LPCWSTR("\0".join(values))
+
+# Error codes we need to watch for go here
+# See: http://msdn.microsoft.com/en-us/library/ms681388%28v=vs.85%29.aspx
+ERROR_ABANDONED_WAIT_0 = 735
+
+# GetLastError()
+GetLastErrorProto = WINFUNCTYPE(DWORD) # Return Type
+GetLastErrorFlags = ()
+GetLastError = GetLastErrorProto(("GetLastError", windll.kernel32), GetLastErrorFlags)
+
+# CreateProcess()
+
+CreateProcessProto = WINFUNCTYPE(BOOL, # Return type
+ LPCWSTR, # lpApplicationName
+ LPWSTR, # lpCommandLine
+ LPVOID, # lpProcessAttributes
+ LPVOID, # lpThreadAttributes
+ BOOL, # bInheritHandles
+ DWORD, # dwCreationFlags
+ LPVOID, # lpEnvironment
+ LPCWSTR, # lpCurrentDirectory
+ LPSTARTUPINFO, # lpStartupInfo
+ LPPROCESS_INFORMATION # lpProcessInformation
+ )
+
+CreateProcessFlags = ((1, "lpApplicationName", None),
+ (1, "lpCommandLine"),
+ (1, "lpProcessAttributes", None),
+ (1, "lpThreadAttributes", None),
+ (1, "bInheritHandles", True),
+ (1, "dwCreationFlags", 0),
+ (1, "lpEnvironment", None),
+ (1, "lpCurrentDirectory", None),
+ (1, "lpStartupInfo"),
+ (2, "lpProcessInformation"))
+
+
+def ErrCheckCreateProcess(result, func, args):
+ ErrCheckBool(result, func, args)
+ # return a tuple (hProcess, hThread, dwProcessID, dwThreadID)
+ pi = args[9]
+ return AutoHANDLE(pi.hProcess), AutoHANDLE(pi.hThread), pi.dwProcessID, pi.dwThreadID
+
+CreateProcess = CreateProcessProto(("CreateProcessW", windll.kernel32),
+ CreateProcessFlags)
+CreateProcess.errcheck = ErrCheckCreateProcess
+
+# flags for CreateProcess
+CREATE_BREAKAWAY_FROM_JOB = 0x01000000
+CREATE_DEFAULT_ERROR_MODE = 0x04000000
+CREATE_NEW_CONSOLE = 0x00000010
+CREATE_NEW_PROCESS_GROUP = 0x00000200
+CREATE_NO_WINDOW = 0x08000000
+CREATE_SUSPENDED = 0x00000004
+CREATE_UNICODE_ENVIRONMENT = 0x00000400
+
+# Flags for IOCompletion ports (some of these would probably be defined if
+# we used the win32 extensions for python, but we don't want to do that if we
+# can help it.)
+INVALID_HANDLE_VALUE = HANDLE(-1) # From winbase.h
+
+# Self Defined Constants for IOPort <--> Job Object communication
+COMPKEY_TERMINATE = c_ulong(0)
+COMPKEY_JOBOBJECT = c_ulong(1)
+
+# flags for job limit information
+# see http://msdn.microsoft.com/en-us/library/ms684147%28VS.85%29.aspx
+JOB_OBJECT_LIMIT_BREAKAWAY_OK = 0x00000800
+JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK = 0x00001000
+
+# Flags for Job Object Completion Port Message IDs from winnt.h
+# See also: http://msdn.microsoft.com/en-us/library/ms684141%28v=vs.85%29.aspx
+JOB_OBJECT_MSG_END_OF_JOB_TIME = 1
+JOB_OBJECT_MSG_END_OF_PROCESS_TIME = 2
+JOB_OBJECT_MSG_ACTIVE_PROCESS_LIMIT = 3
+JOB_OBJECT_MSG_ACTIVE_PROCESS_ZERO = 4
+JOB_OBJECT_MSG_NEW_PROCESS = 6
+JOB_OBJECT_MSG_EXIT_PROCESS = 7
+JOB_OBJECT_MSG_ABNORMAL_EXIT_PROCESS = 8
+JOB_OBJECT_MSG_PROCESS_MEMORY_LIMIT = 9
+JOB_OBJECT_MSG_JOB_MEMORY_LIMIT = 10
+
+# See winbase.h
+DEBUG_ONLY_THIS_PROCESS = 0x00000002
+DEBUG_PROCESS = 0x00000001
+DETACHED_PROCESS = 0x00000008
+
+# GetQueuedCompletionStatus -
+# http://msdn.microsoft.com/en-us/library/aa364986%28v=vs.85%29.aspx
+GetQueuedCompletionStatusProto = WINFUNCTYPE(BOOL, # Return Type
+ HANDLE, # Completion Port
+ LPDWORD, # Msg ID
+ LPULONG, # Completion Key
+ # PID Returned from the call (may be null)
+ LPULONG,
+ DWORD) # milliseconds to wait
+GetQueuedCompletionStatusFlags = ((1, "CompletionPort", INVALID_HANDLE_VALUE),
+ (1, "lpNumberOfBytes", None),
+ (1, "lpCompletionKey", None),
+ (1, "lpPID", None),
+ (1, "dwMilliseconds", 0))
+GetQueuedCompletionStatus = GetQueuedCompletionStatusProto(("GetQueuedCompletionStatus",
+ windll.kernel32),
+ GetQueuedCompletionStatusFlags)
+
+# CreateIOCompletionPort
+# Note that the completion key is just a number, not a pointer.
+CreateIoCompletionPortProto = WINFUNCTYPE(HANDLE, # Return Type
+ HANDLE, # File Handle
+ HANDLE, # Existing Completion Port
+ c_ulong, # Completion Key
+ DWORD) # Number of Threads
+
+CreateIoCompletionPortFlags = ((1, "FileHandle", INVALID_HANDLE_VALUE),
+ (1, "ExistingCompletionPort", 0),
+ (1, "CompletionKey", c_ulong(0)),
+ (1, "NumberOfConcurrentThreads", 0))
+CreateIoCompletionPort = CreateIoCompletionPortProto(("CreateIoCompletionPort",
+ windll.kernel32),
+ CreateIoCompletionPortFlags)
+CreateIoCompletionPort.errcheck = ErrCheckHandle
+
+# SetInformationJobObject
+SetInformationJobObjectProto = WINFUNCTYPE(BOOL, # Return Type
+ HANDLE, # Job Handle
+ DWORD, # Type of Class next param is
+ LPVOID, # Job Object Class
+ DWORD) # Job Object Class Length
+
+SetInformationJobObjectProtoFlags = ((1, "hJob", None),
+ (1, "JobObjectInfoClass", None),
+ (1, "lpJobObjectInfo", None),
+ (1, "cbJobObjectInfoLength", 0))
+SetInformationJobObject = SetInformationJobObjectProto(("SetInformationJobObject",
+ windll.kernel32),
+ SetInformationJobObjectProtoFlags)
+SetInformationJobObject.errcheck = ErrCheckBool
+
+# CreateJobObject()
+CreateJobObjectProto = WINFUNCTYPE(HANDLE, # Return type
+ LPVOID, # lpJobAttributes
+ LPCWSTR # lpName
+ )
+
+CreateJobObjectFlags = ((1, "lpJobAttributes", None),
+ (1, "lpName", None))
+
+CreateJobObject = CreateJobObjectProto(("CreateJobObjectW", windll.kernel32),
+ CreateJobObjectFlags)
+CreateJobObject.errcheck = ErrCheckHandle
+
+# AssignProcessToJobObject()
+
+AssignProcessToJobObjectProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hJob
+ HANDLE # hProcess
+ )
+AssignProcessToJobObjectFlags = ((1, "hJob"),
+ (1, "hProcess"))
+AssignProcessToJobObject = AssignProcessToJobObjectProto(
+ ("AssignProcessToJobObject", windll.kernel32),
+ AssignProcessToJobObjectFlags)
+AssignProcessToJobObject.errcheck = ErrCheckBool
+
+# GetCurrentProcess()
+# because os.getpid() is way too easy
+GetCurrentProcessProto = WINFUNCTYPE(HANDLE # Return type
+ )
+GetCurrentProcessFlags = ()
+GetCurrentProcess = GetCurrentProcessProto(
+ ("GetCurrentProcess", windll.kernel32),
+ GetCurrentProcessFlags)
+GetCurrentProcess.errcheck = ErrCheckHandle
+
+# IsProcessInJob()
+try:
+ IsProcessInJobProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # Process Handle
+ HANDLE, # Job Handle
+ LPBOOL # Result
+ )
+ IsProcessInJobFlags = ((1, "ProcessHandle"),
+ (1, "JobHandle", HANDLE(0)),
+ (2, "Result"))
+ IsProcessInJob = IsProcessInJobProto(
+ ("IsProcessInJob", windll.kernel32),
+ IsProcessInJobFlags)
+ IsProcessInJob.errcheck = ErrCheckBool
+except AttributeError:
+ # windows 2k doesn't have this API
+ def IsProcessInJob(process):
+ return False
+
+
+# ResumeThread()
+
+def ErrCheckResumeThread(result, func, args):
+ if result == -1:
+ raise WinError()
+
+ return args
+
+ResumeThreadProto = WINFUNCTYPE(DWORD, # Return type
+ HANDLE # hThread
+ )
+ResumeThreadFlags = ((1, "hThread"),)
+ResumeThread = ResumeThreadProto(("ResumeThread", windll.kernel32),
+ ResumeThreadFlags)
+ResumeThread.errcheck = ErrCheckResumeThread
+
+# TerminateProcess()
+
+TerminateProcessProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hProcess
+ UINT # uExitCode
+ )
+TerminateProcessFlags = ((1, "hProcess"),
+ (1, "uExitCode", 127))
+TerminateProcess = TerminateProcessProto(
+ ("TerminateProcess", windll.kernel32),
+ TerminateProcessFlags)
+TerminateProcess.errcheck = ErrCheckBool
+
+# TerminateJobObject()
+
+TerminateJobObjectProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hJob
+ UINT # uExitCode
+ )
+TerminateJobObjectFlags = ((1, "hJob"),
+ (1, "uExitCode", 127))
+TerminateJobObject = TerminateJobObjectProto(
+ ("TerminateJobObject", windll.kernel32),
+ TerminateJobObjectFlags)
+TerminateJobObject.errcheck = ErrCheckBool
+
+# WaitForSingleObject()
+
+WaitForSingleObjectProto = WINFUNCTYPE(DWORD, # Return type
+ HANDLE, # hHandle
+ DWORD, # dwMilliseconds
+ )
+WaitForSingleObjectFlags = ((1, "hHandle"),
+ (1, "dwMilliseconds", -1))
+WaitForSingleObject = WaitForSingleObjectProto(
+ ("WaitForSingleObject", windll.kernel32),
+ WaitForSingleObjectFlags)
+
+# http://msdn.microsoft.com/en-us/library/ms681381%28v=vs.85%29.aspx
+INFINITE = -1
+WAIT_TIMEOUT = 0x0102
+WAIT_OBJECT_0 = 0x0
+WAIT_ABANDONED = 0x0080
+
+# http://msdn.microsoft.com/en-us/library/ms683189%28VS.85%29.aspx
+STILL_ACTIVE = 259
+
+# Used when we terminate a process.
+ERROR_CONTROL_C_EXIT = 0x23c
+
+# GetExitCodeProcess()
+
+GetExitCodeProcessProto = WINFUNCTYPE(BOOL, # Return type
+ HANDLE, # hProcess
+ LPDWORD, # lpExitCode
+ )
+GetExitCodeProcessFlags = ((1, "hProcess"),
+ (2, "lpExitCode"))
+GetExitCodeProcess = GetExitCodeProcessProto(
+ ("GetExitCodeProcess", windll.kernel32),
+ GetExitCodeProcessFlags)
+GetExitCodeProcess.errcheck = ErrCheckBool
+
+
+def CanCreateJobObject():
+ currentProc = GetCurrentProcess()
+ if IsProcessInJob(currentProc):
+ jobinfo = QueryInformationJobObject(HANDLE(0), 'JobObjectExtendedLimitInformation')
+ limitflags = jobinfo['BasicLimitInformation']['LimitFlags']
+ return bool(limitflags & JOB_OBJECT_LIMIT_BREAKAWAY_OK) or \
+ bool(limitflags & JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK)
+ else:
+ return True
+
+# testing functions
+
+
+def parent():
+ print 'Starting parent'
+ currentProc = GetCurrentProcess()
+ if IsProcessInJob(currentProc):
+ print >> sys.stderr, "You should not be in a job object to test"
+ sys.exit(1)
+ assert CanCreateJobObject()
+ print 'File: %s' % __file__
+ command = [sys.executable, __file__, '-child']
+ print 'Running command: %s' % command
+ process = subprocess.Popen(command)
+ process.kill()
+ code = process.returncode
+ print 'Child code: %s' % code
+ assert code == 127
+
+
+def child():
+ print 'Starting child'
+ currentProc = GetCurrentProcess()
+ injob = IsProcessInJob(currentProc)
+ print "Is in a job?: %s" % injob
+ can_create = CanCreateJobObject()
+ print 'Can create job?: %s' % can_create
+ process = subprocess.Popen('c:\\windows\\notepad.exe')
+ assert process._job
+ jobinfo = QueryInformationJobObject(process._job, 'JobObjectExtendedLimitInformation')
+ print 'Job info: %s' % jobinfo
+ limitflags = jobinfo['BasicLimitInformation']['LimitFlags']
+ print 'LimitFlags: %s' % limitflags
+ process.kill()
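
Editor's note (not part of the patch): taken together, the wrappers above are enough to start a child inside a fresh job object and collect its exit code. The following Windows-only sketch shows one way to wire them up; the command line is a placeholder, error handling and handle redirection are omitted, and it is an illustration rather than the way mozprocess itself drives these calls.

    import os
    from ctypes import sizeof

    from mozprocess.winprocess import (
        AssignProcessToJobObject, CreateJobObject, CreateProcess,
        EnvironmentBlock, GetExitCodeProcess, ResumeThread, STARTUPINFO,
        WaitForSingleObject, CREATE_SUSPENDED, CREATE_UNICODE_ENVIRONMENT,
        INFINITE)

    si = STARTUPINFO()
    si.cb = sizeof(STARTUPINFO)

    # Start the child suspended so it can be placed in the job object
    # before it executes anything.
    flags = CREATE_SUSPENDED | CREATE_UNICODE_ENVIRONMENT
    hProcess, hThread, pid, tid = CreateProcess(
        None,                                # lpApplicationName
        u'cmd.exe /c exit 3',                # lpCommandLine (placeholder)
        None, None,                          # security attributes
        False,                               # bInheritHandles
        flags,                               # dwCreationFlags
        EnvironmentBlock(dict(os.environ)),  # lpEnvironment (wide-char block)
        None,                                # lpCurrentDirectory
        si)                                  # lpStartupInfo

    job = CreateJobObject()
    AssignProcessToJobObject(job, hProcess)
    ResumeThread(hThread)

    WaitForSingleObject(hProcess, INFINITE)
    print('exit code: %d' % GetExitCodeProcess(hProcess))
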
diff --git a/testing/mozbase/mozprocess/setup.py b/testing/mozbase/mozprocess/setup.py
new file mode 100644
index 000000000..fc8225a12
--- /dev/null
+++ b/testing/mozbase/mozprocess/setup.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = '0.23'
+
+setup(name='mozprocess',
+ version=PACKAGE_VERSION,
+ description="Mozilla-authored process handling",
+ long_description='see http://mozbase.readthedocs.org/',
+ classifiers=['Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL 2.0',
+ packages=['mozprocess'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=['mozinfo'],
+ entry_points="""
+ # -*- Entry points: -*-
+ """,
+ )
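
Editor's note (not part of the patch): this setup.py is a plain setuptools package, so it can be installed into a virtualenv for local development. The public entry point is ProcessHandler, which lives in processhandler.py elsewhere in this patch and is documented in docs/mozprocess.rst; the sketch below is indicative only, so treat the exact keyword names as documented there rather than here.

    import sys

    from mozprocess import ProcessHandler

    def on_line(line):
        # called for every line the child writes to stdout/stderr
        print(line)

    proc = ProcessHandler([sys.executable, '-c', "print('hello from the child')"],
                          processOutputLine=[on_line])
    proc.run(timeout=60)   # start the child and enforce a 60 second timeout
    status = proc.wait()   # block until it exits; returns the exit status
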
diff --git a/testing/mozbase/mozprocess/tests/Makefile b/testing/mozbase/mozprocess/tests/Makefile
new file mode 100644
index 000000000..ea7163b00
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/Makefile
@@ -0,0 +1,55 @@
+#
+# mozprocess proclaunch tests Makefile
+#
+
+# include rules for platform determination
+include iniparser/platform.mk
+
+ifeq ($(WIN32), 1)
+# Win 32
+CC = cl
+LINK = link
+CFLAGS = //Od //I "iniparser" //D "WIN32" //D "_WIN32" //D "_DEBUG" //D "_CONSOLE" //D "_UNICODE" //D "UNICODE" //Gm //EHsc //RTC1 //MDd //W3 //nologo //c //ZI //TC
+LFLAGS = //OUT:"proclaunch.exe" //INCREMENTAL //LIBPATH:"iniparser\\" //NOLOGO //DEBUG //SUBSYSTEM:CONSOLE //DYNAMICBASE //NXCOMPAT //ERRORREPORT:PROMPT iniparser.lib kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib
+RM = rm -f
+
+all: iniparser proclaunch
+
+iniparser:
+ $(MAKE) -C iniparser
+
+proclaunch.obj: proclaunch.c
+ @(echo "compiling proclaunch; platform: $(UNAME), WIN32: $(WIN32)")
+ $(CC) $(CFLAGS) proclaunch.c
+
+proclaunch: proclaunch.obj
+ $(LINK) $(LFLAGS) proclaunch.obj
+
+clean:
+ $(RM) proclaunch.exe proclaunch.obj
+else
+# *nix/Mac
+LFLAGS = -L.. -liniparser
+AR = ar
+ARFLAGS = rcv
+RM = rm -f
+CC = gcc
+ifeq ($(UNAME), Linux)
+CFLAGS = -g -v -Iiniparser
+else
+CFLAGS = -g -v -arch i386 -Iiniparser
+endif
+
+all: libiniparser.a proclaunch
+
+libiniparser.a:
+ $(MAKE) -C iniparser
+
+proclaunch: proclaunch.c
+ @(echo "compiling proclaunch; platform: $(UNAME), WIN32: $(WIN32)")
+ $(CC) $(CFLAGS) -o proclaunch proclaunch.c -Iiniparser -Liniparser -liniparser
+
+clean:
+ $(RM) proclaunch
+ $(MAKE) -C iniparser clean
+endif
diff --git a/testing/mozbase/mozprocess/tests/infinite_loop.py b/testing/mozbase/mozprocess/tests/infinite_loop.py
new file mode 100644
index 000000000..e38e425e0
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/infinite_loop.py
@@ -0,0 +1,18 @@
+import threading
+import time
+import sys
+import signal
+
+if 'deadlock' in sys.argv:
+ lock = threading.Lock()
+
+ def trap(sig, frame):
+ lock.acquire()
+
+ # get the lock once
+ lock.acquire()
+ # and take it again on SIGTERM signal: deadlock.
+ signal.signal(signal.SIGTERM, trap)
+
+while 1:
+ time.sleep(1)
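
Editor's note (not part of the patch): infinite_loop.py is a fixture for the mozprocess kill/timeout tests. Without arguments it simply never exits; with 'deadlock' it installs a SIGTERM handler that blocks on a held lock, simulating a child that ignores polite termination. A rough POSIX-only illustration of that behaviour using plain subprocess (the real tests use ProcessHandler; on Windows, terminate() already force-kills, so the trap is irrelevant there):

    import subprocess
    import sys
    import time

    proc = subprocess.Popen([sys.executable, 'infinite_loop.py', 'deadlock'])
    time.sleep(1)        # give the child time to install its SIGTERM trap
    proc.terminate()     # SIGTERM: the handler deadlocks instead of exiting
    time.sleep(1)
    print(proc.poll())   # still None: the child ignored the polite request
    proc.kill()          # SIGKILL cannot be trapped
    proc.wait()
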
diff --git a/testing/mozbase/mozprocess/tests/iniparser/AUTHORS b/testing/mozbase/mozprocess/tests/iniparser/AUTHORS
new file mode 100644
index 000000000..d5a3f6b2e
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/AUTHORS
@@ -0,0 +1,6 @@
+Author: Nicolas Devillard <ndevilla@free.fr>
+
+This tiny library has received countless contributions and I have
+not kept track of all the people who contributed. Let them be thanked
+for their ideas, code, suggestions, corrections, enhancements!
+
diff --git a/testing/mozbase/mozprocess/tests/iniparser/INSTALL b/testing/mozbase/mozprocess/tests/iniparser/INSTALL
new file mode 100644
index 000000000..a5b05d0e2
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/INSTALL
@@ -0,0 +1,15 @@
+
+iniParser installation instructions
+-----------------------------------
+
+- Modify the Makefile to suit your environment.
+- Type 'make' to make the library.
+- Type 'make check' to make the test program.
+- Type 'test/iniexample' to launch the test program.
+- Type 'test/parse' to launch torture tests.
+
+
+
+Enjoy!
+N. Devillard
+Wed Mar 2 21:14:17 CET 2011
diff --git a/testing/mozbase/mozprocess/tests/iniparser/LICENSE b/testing/mozbase/mozprocess/tests/iniparser/LICENSE
new file mode 100644
index 000000000..5a3a80bab
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/LICENSE
@@ -0,0 +1,21 @@
+Copyright (c) 2000-2011 by Nicolas Devillard.
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
diff --git a/testing/mozbase/mozprocess/tests/iniparser/Makefile b/testing/mozbase/mozprocess/tests/iniparser/Makefile
new file mode 100644
index 000000000..48c86f9d6
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/Makefile
@@ -0,0 +1,85 @@
+#
+# iniparser Makefile
+#
+
+# source files
+SRCS = iniparser.c \
+ dictionary.c
+
+# include rules for platform determination
+include platform.mk
+
+# flags for the various systems
+ifeq ($(UNAME), Linux)
+ # Compiler settings
+ CC = gcc
+ AR = ar
+ ARFLAGS = rcv
+ SHLD = ${CC} ${CFLAGS}
+ CFLAGS = -O2 -fPIC -Wall -ansi -pedantic
+ LDSHFLAGS = -shared -Wl,-Bsymbolic -Wl,-rpath -Wl,/usr/lib -Wl,-rpath,/usr/lib
+ LDFLAGS = -Wl,-rpath -Wl,/usr/lib -Wl,-rpath,/usr/lib
+endif
+ifeq ($(UNAME), Darwin)
+ # Compiler settings
+ CC = gcc
+ # Ar settings to build the library
+ AR = ar
+ ARFLAGS = rcv
+ SHLD = libtool
+ CFLAGS = -v -arch i386 -fPIC -Wall -ansi -pedantic
+ LDFLAGS = -arch_only i386
+endif
+ifeq ($(WIN32), 1)
+ CC = cl
+ CFLAGS = //Od //D "_WIN32" //D "WIN32" //D "_CONSOLE" //D "_CRT_SECURE_NO_WARNINGS" //D "_UNICODE" //D "UNICODE" //Gm //EHsc //RTC1 //MDd //W3 //nologo //c //ZI //TC
+ LDFLAGS = //OUT:"iniparser.lib" //NOLOGO
+ LINK = lib
+ RM = rm -f
+endif
+
+# windows build rules
+ifeq ($(WIN32), 1)
+
+COMPILE.c = $(CC) $(CFLAGS) -c
+OBJS = $(SRCS:.c=.obj)
+
+all: iniparser.obj dictionary.obj iniparser.lib
+
+iniparser.obj: dictionary.obj
+ @($(CC) $(CFLAGS) iniparser.c)
+
+dictionary.obj:
+ @(echo "compiling dictionary; WIN32: $(WIN32); platform: $(UNAME)")
+ @($(CC) $(CFLAGS) dictionary.c)
+
+iniparser.lib: dictionary.obj iniparser.obj
+ @(echo "linking $(OBJS)")
+ @($(LINK) $(LDFLAGS) $(OBJS))
+else
+
+# *nix (and Mac) build rules
+RM = rm -f
+COMPILE.c = $(CC) $(CFLAGS) -c
+OBJS = $(SRCS:.c=.o)
+
+all: libiniparser.a libiniparser.so
+
+.c.o:
+ @(echo "platform: $(UNAME), WIN32=$(WIN32); compiling $< ...")
+ @($(COMPILE.c) -o $@ $<)
+
+libiniparser.a: $(OBJS)
+ @($(AR) $(ARFLAGS) libiniparser.a $(OBJS))
+
+ifeq ($(UNAME), Linux)
+libiniparser.so: $(OBJS)
+ @$(SHLD) $(LDSHFLAGS) -o $@.0 $(OBJS) $(LDFLAGS)
+else
+libiniparser.so: $(OBJS)
+ @$(SHLD) -o $@.0 $(LDFLAGS) $(OBJS)
+endif
+endif
+
+clean:
+ $(RM) $(OBJS) libiniparser.*
diff --git a/testing/mozbase/mozprocess/tests/iniparser/README b/testing/mozbase/mozprocess/tests/iniparser/README
new file mode 100644
index 000000000..af2a5c38f
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/README
@@ -0,0 +1,12 @@
+
+Welcome to iniParser -- version 3.0
+released 02 Mar 2011
+
+This module offers parsing of ini files from the C level.
+Complete documentation is available in HTML format: from this directory,
+open the file html/index.html with any HTML-capable browser.
+
+Enjoy!
+
+N.Devillard
+Wed Mar 2 21:46:14 CET 2011
diff --git a/testing/mozbase/mozprocess/tests/iniparser/dictionary.c b/testing/mozbase/mozprocess/tests/iniparser/dictionary.c
new file mode 100644
index 000000000..da41d9b2e
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/dictionary.c
@@ -0,0 +1,407 @@
+/*-------------------------------------------------------------------------*/
+/**
+ @file dictionary.c
+ @author N. Devillard
+ @date Sep 2007
+ @version $Revision: 1.27 $
+ @brief Implements a dictionary for string variables.
+
+ This module implements a simple dictionary object, i.e. a list
+ of string/string associations. This object is useful to store e.g.
+   information retrieved from a configuration file (ini files).
+*/
+/*--------------------------------------------------------------------------*/
+
+/*
+ $Id: dictionary.c,v 1.27 2007-11-23 21:39:18 ndevilla Exp $
+ $Revision: 1.27 $
+*/
+/*---------------------------------------------------------------------------
+ Includes
+ ---------------------------------------------------------------------------*/
+#include "dictionary.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+/** Maximum value size for integers and doubles. */
+#define MAXVALSZ 1024
+
+/** Minimal allocated number of entries in a dictionary */
+#define DICTMINSZ 128
+
+/** Invalid key token */
+#define DICT_INVALID_KEY ((char*)-1)
+
+/*---------------------------------------------------------------------------
+ Private functions
+ ---------------------------------------------------------------------------*/
+
+/* Doubles the allocated size associated to a pointer */
+/* 'size' is the current allocated size. */
+static void * mem_double(void * ptr, int size)
+{
+ void * newptr ;
+
+ newptr = calloc(2*size, 1);
+ if (newptr==NULL) {
+ return NULL ;
+ }
+ memcpy(newptr, ptr, size);
+ free(ptr);
+ return newptr ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Duplicate a string
+ @param s String to duplicate
+ @return Pointer to a newly allocated string, to be freed with free()
+
+ This is a replacement for strdup(). This implementation is provided
+ for systems that do not have it.
+ */
+/*--------------------------------------------------------------------------*/
+static char * xstrdup(char * s)
+{
+ char * t ;
+ if (!s)
+ return NULL ;
+ t = malloc(strlen(s)+1) ;
+ if (t) {
+ strcpy(t,s);
+ }
+ return t ;
+}
+
+/*---------------------------------------------------------------------------
+ Function codes
+ ---------------------------------------------------------------------------*/
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Compute the hash key for a string.
+ @param key Character string to use for key.
+ @return 1 unsigned int on at least 32 bits.
+
+ This hash function has been taken from an Article in Dr Dobbs Journal.
+ This is normally a collision-free function, distributing keys evenly.
+ The key is stored anyway in the struct so that collision can be avoided
+ by comparing the key itself in last resort.
+ */
+/*--------------------------------------------------------------------------*/
+unsigned dictionary_hash(char * key)
+{
+ int len ;
+ unsigned hash ;
+ int i ;
+
+ len = strlen(key);
+ for (hash=0, i=0 ; i<len ; i++) {
+ hash += (unsigned)key[i] ;
+ hash += (hash<<10);
+ hash ^= (hash>>6) ;
+ }
+ hash += (hash <<3);
+ hash ^= (hash >>11);
+ hash += (hash <<15);
+ return hash ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Create a new dictionary object.
+ @param size Optional initial size of the dictionary.
+  @return   1 newly allocated dictionary object.
+
+ This function allocates a new dictionary object of given size and returns
+ it. If you do not know in advance (roughly) the number of entries in the
+ dictionary, give size=0.
+ */
+/*--------------------------------------------------------------------------*/
+dictionary * dictionary_new(int size)
+{
+ dictionary * d ;
+
+ /* If no size was specified, allocate space for DICTMINSZ */
+ if (size<DICTMINSZ) size=DICTMINSZ ;
+
+ if (!(d = (dictionary *)calloc(1, sizeof(dictionary)))) {
+ return NULL;
+ }
+ d->size = size ;
+ d->val = (char **)calloc(size, sizeof(char*));
+ d->key = (char **)calloc(size, sizeof(char*));
+ d->hash = (unsigned int *)calloc(size, sizeof(unsigned));
+ return d ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Delete a dictionary object
+ @param d dictionary object to deallocate.
+ @return void
+
+ Deallocate a dictionary object and all memory associated to it.
+ */
+/*--------------------------------------------------------------------------*/
+void dictionary_del(dictionary * d)
+{
+ int i ;
+
+ if (d==NULL) return ;
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]!=NULL)
+ free(d->key[i]);
+ if (d->val[i]!=NULL)
+ free(d->val[i]);
+ }
+ free(d->val);
+ free(d->key);
+ free(d->hash);
+ free(d);
+ return ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get a value from a dictionary.
+ @param d dictionary object to search.
+ @param key Key to look for in the dictionary.
+ @param def Default value to return if key not found.
+ @return 1 pointer to internally allocated character string.
+
+ This function locates a key in a dictionary and returns a pointer to its
+ value, or the passed 'def' pointer if no such key can be found in
+ dictionary. The returned character pointer points to data internal to the
+ dictionary object, you should not try to free it or modify it.
+ */
+/*--------------------------------------------------------------------------*/
+char * dictionary_get(dictionary * d, char * key, char * def)
+{
+ unsigned hash ;
+ int i ;
+
+ hash = dictionary_hash(key);
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ /* Compare hash */
+ if (hash==d->hash[i]) {
+ /* Compare string, to avoid hash collisions */
+ if (!strcmp(key, d->key[i])) {
+ return d->val[i] ;
+ }
+ }
+ }
+ return def ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Set a value in a dictionary.
+ @param d dictionary object to modify.
+ @param key Key to modify or add.
+ @param val Value to add.
+ @return int 0 if Ok, anything else otherwise
+
+ If the given key is found in the dictionary, the associated value is
+ replaced by the provided one. If the key cannot be found in the
+ dictionary, it is added to it.
+
+ It is Ok to provide a NULL value for val, but NULL values for the dictionary
+ or the key are considered as errors: the function will return immediately
+ in such a case.
+
+ Notice that if you dictionary_set a variable to NULL, a call to
+ dictionary_get will return a NULL value: the variable will be found, and
+ its value (NULL) is returned. In other words, setting the variable
+ content to NULL is equivalent to deleting the variable from the
+ dictionary. It is not possible (in this implementation) to have a key in
+ the dictionary without value.
+
+ This function returns non-zero in case of failure.
+ */
+/*--------------------------------------------------------------------------*/
+int dictionary_set(dictionary * d, char * key, char * val)
+{
+ int i ;
+ unsigned hash ;
+
+ if (d==NULL || key==NULL) return -1 ;
+
+ /* Compute hash for this key */
+ hash = dictionary_hash(key) ;
+ /* Find if value is already in dictionary */
+ if (d->n>0) {
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ if (hash==d->hash[i]) { /* Same hash value */
+ if (!strcmp(key, d->key[i])) { /* Same key */
+ /* Found a value: modify and return */
+ if (d->val[i]!=NULL)
+ free(d->val[i]);
+ d->val[i] = val ? xstrdup(val) : NULL ;
+ /* Value has been modified: return */
+ return 0 ;
+ }
+ }
+ }
+ }
+ /* Add a new value */
+ /* See if dictionary needs to grow */
+ if (d->n==d->size) {
+
+ /* Reached maximum size: reallocate dictionary */
+ d->val = (char **)mem_double(d->val, d->size * sizeof(char*)) ;
+ d->key = (char **)mem_double(d->key, d->size * sizeof(char*)) ;
+ d->hash = (unsigned int *)mem_double(d->hash, d->size * sizeof(unsigned)) ;
+ if ((d->val==NULL) || (d->key==NULL) || (d->hash==NULL)) {
+ /* Cannot grow dictionary */
+ return -1 ;
+ }
+ /* Double size */
+ d->size *= 2 ;
+ }
+
+ /* Insert key in the first empty slot */
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL) {
+ /* Add key here */
+ break ;
+ }
+ }
+ /* Copy key */
+ d->key[i] = xstrdup(key);
+ d->val[i] = val ? xstrdup(val) : NULL ;
+ d->hash[i] = hash;
+ d->n ++ ;
+ return 0 ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Delete a key in a dictionary
+ @param d dictionary object to modify.
+ @param key Key to remove.
+ @return void
+
+ This function deletes a key in a dictionary. Nothing is done if the
+ key cannot be found.
+ */
+/*--------------------------------------------------------------------------*/
+void dictionary_unset(dictionary * d, char * key)
+{
+ unsigned hash ;
+ int i ;
+
+ if (key == NULL) {
+ return;
+ }
+
+ hash = dictionary_hash(key);
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ /* Compare hash */
+ if (hash==d->hash[i]) {
+ /* Compare string, to avoid hash collisions */
+ if (!strcmp(key, d->key[i])) {
+ /* Found key */
+ break ;
+ }
+ }
+ }
+ if (i>=d->size)
+ /* Key not found */
+ return ;
+
+ free(d->key[i]);
+ d->key[i] = NULL ;
+ if (d->val[i]!=NULL) {
+ free(d->val[i]);
+ d->val[i] = NULL ;
+ }
+ d->hash[i] = 0 ;
+ d->n -- ;
+ return ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Dump a dictionary to an opened file pointer.
+ @param d Dictionary to dump
+ @param f Opened file pointer.
+ @return void
+
+ Dumps a dictionary onto an opened file pointer. Key pairs are printed out
+ as @c [Key]=[Value], one per line. It is Ok to provide stdout or stderr as
+ output file pointers.
+ */
+/*--------------------------------------------------------------------------*/
+void dictionary_dump(dictionary * d, FILE * out)
+{
+ int i ;
+
+ if (d==NULL || out==NULL) return ;
+ if (d->n<1) {
+ fprintf(out, "empty dictionary\n");
+ return ;
+ }
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]) {
+ fprintf(out, "%20s\t[%s]\n",
+ d->key[i],
+ d->val[i] ? d->val[i] : "UNDEF");
+ }
+ }
+ return ;
+}
+
+
+/* Test code */
+#ifdef TESTDIC
+#define NVALS 20000
+int main(int argc, char *argv[])
+{
+ dictionary * d ;
+ char * val ;
+ int i ;
+ char cval[90] ;
+
+ /* Allocate dictionary */
+ printf("allocating...\n");
+ d = dictionary_new(0);
+
+ /* Set values in dictionary */
+ printf("setting %d values...\n", NVALS);
+ for (i=0 ; i<NVALS ; i++) {
+ sprintf(cval, "%04d", i);
+ dictionary_set(d, cval, "salut");
+ }
+ printf("getting %d values...\n", NVALS);
+ for (i=0 ; i<NVALS ; i++) {
+ sprintf(cval, "%04d", i);
+ val = dictionary_get(d, cval, DICT_INVALID_KEY);
+ if (val==DICT_INVALID_KEY) {
+ printf("cannot get value for key [%s]\n", cval);
+ }
+ }
+ printf("unsetting %d values...\n", NVALS);
+ for (i=0 ; i<NVALS ; i++) {
+ sprintf(cval, "%04d", i);
+ dictionary_unset(d, cval);
+ }
+ if (d->n != 0) {
+ printf("error deleting values\n");
+ }
+ printf("deallocating...\n");
+ dictionary_del(d);
+ return 0 ;
+}
+#endif
+/* vim: set ts=4 et sw=4 tw=75 */
diff --git a/testing/mozbase/mozprocess/tests/iniparser/dictionary.h b/testing/mozbase/mozprocess/tests/iniparser/dictionary.h
new file mode 100644
index 000000000..e340a82d0
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/dictionary.h
@@ -0,0 +1,176 @@
+
+/*-------------------------------------------------------------------------*/
+/**
+ @file dictionary.h
+ @author N. Devillard
+ @date Sep 2007
+ @version $Revision: 1.12 $
+ @brief Implements a dictionary for string variables.
+
+ This module implements a simple dictionary object, i.e. a list
+ of string/string associations. This object is useful to store e.g.
+   information retrieved from a configuration file (ini files).
+*/
+/*--------------------------------------------------------------------------*/
+
+/*
+ $Id: dictionary.h,v 1.12 2007-11-23 21:37:00 ndevilla Exp $
+ $Author: ndevilla $
+ $Date: 2007-11-23 21:37:00 $
+ $Revision: 1.12 $
+*/
+
+#ifndef _DICTIONARY_H_
+#define _DICTIONARY_H_
+
+/*---------------------------------------------------------------------------
+ Includes
+ ---------------------------------------------------------------------------*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+/*---------------------------------------------------------------------------
+ New types
+ ---------------------------------------------------------------------------*/
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Dictionary object
+
+ This object contains a list of string/string associations. Each
+ association is identified by a unique string key. Looking up values
+ in the dictionary is speeded up by the use of a (hopefully collision-free)
+ hash function.
+ */
+/*-------------------------------------------------------------------------*/
+typedef struct _dictionary_ {
+ int n ; /** Number of entries in dictionary */
+ int size ; /** Storage size */
+ char ** val ; /** List of string values */
+ char ** key ; /** List of string keys */
+ unsigned * hash ; /** List of hash values for keys */
+} dictionary ;
+
+
+/*---------------------------------------------------------------------------
+ Function prototypes
+ ---------------------------------------------------------------------------*/
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Compute the hash key for a string.
+ @param key Character string to use for key.
+ @return 1 unsigned int on at least 32 bits.
+
+ This hash function has been taken from an Article in Dr Dobbs Journal.
+ This is normally a collision-free function, distributing keys evenly.
+ The key is stored anyway in the struct so that collision can be avoided
+ by comparing the key itself in last resort.
+ */
+/*--------------------------------------------------------------------------*/
+unsigned dictionary_hash(char * key);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Create a new dictionary object.
+ @param size Optional initial size of the dictionary.
+  @return   1 newly allocated dictionary object.
+
+ This function allocates a new dictionary object of given size and returns
+ it. If you do not know in advance (roughly) the number of entries in the
+ dictionary, give size=0.
+ */
+/*--------------------------------------------------------------------------*/
+dictionary * dictionary_new(int size);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Delete a dictionary object
+ @param d dictionary object to deallocate.
+ @return void
+
+ Deallocate a dictionary object and all memory associated to it.
+ */
+/*--------------------------------------------------------------------------*/
+void dictionary_del(dictionary * vd);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get a value from a dictionary.
+ @param d dictionary object to search.
+ @param key Key to look for in the dictionary.
+ @param def Default value to return if key not found.
+ @return 1 pointer to internally allocated character string.
+
+ This function locates a key in a dictionary and returns a pointer to its
+ value, or the passed 'def' pointer if no such key can be found in
+ dictionary. The returned character pointer points to data internal to the
+ dictionary object, you should not try to free it or modify it.
+ */
+/*--------------------------------------------------------------------------*/
+char * dictionary_get(dictionary * d, char * key, char * def);
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Set a value in a dictionary.
+ @param d dictionary object to modify.
+ @param key Key to modify or add.
+ @param val Value to add.
+ @return int 0 if Ok, anything else otherwise
+
+ If the given key is found in the dictionary, the associated value is
+ replaced by the provided one. If the key cannot be found in the
+ dictionary, it is added to it.
+
+ It is Ok to provide a NULL value for val, but NULL values for the dictionary
+ or the key are considered as errors: the function will return immediately
+ in such a case.
+
+ Notice that if you dictionary_set a variable to NULL, a call to
+ dictionary_get will return a NULL value: the variable will be found, and
+ its value (NULL) is returned. In other words, setting the variable
+ content to NULL is equivalent to deleting the variable from the
+ dictionary. It is not possible (in this implementation) to have a key in
+ the dictionary without value.
+
+ This function returns non-zero in case of failure.
+ */
+/*--------------------------------------------------------------------------*/
+int dictionary_set(dictionary * vd, char * key, char * val);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Delete a key in a dictionary
+ @param d dictionary object to modify.
+ @param key Key to remove.
+ @return void
+
+ This function deletes a key in a dictionary. Nothing is done if the
+ key cannot be found.
+ */
+/*--------------------------------------------------------------------------*/
+void dictionary_unset(dictionary * d, char * key);
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Dump a dictionary to an opened file pointer.
+ @param d Dictionary to dump
+ @param f Opened file pointer.
+ @return void
+
+ Dumps a dictionary onto an opened file pointer. Key pairs are printed out
+ as @c [Key]=[Value], one per line. It is Ok to provide stdout or stderr as
+ output file pointers.
+ */
+/*--------------------------------------------------------------------------*/
+void dictionary_dump(dictionary * d, FILE * out);
+
+#endif
diff --git a/testing/mozbase/mozprocess/tests/iniparser/iniparser.c b/testing/mozbase/mozprocess/tests/iniparser/iniparser.c
new file mode 100644
index 000000000..02a23b755
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/iniparser.c
@@ -0,0 +1,648 @@
+
+/*-------------------------------------------------------------------------*/
+/**
+ @file iniparser.c
+ @author N. Devillard
+ @date Sep 2007
+ @version 3.0
+ @brief Parser for ini files.
+*/
+/*--------------------------------------------------------------------------*/
+/*
+ $Id: iniparser.c,v 2.19 2011-03-02 20:15:13 ndevilla Exp $
+ $Revision: 2.19 $
+ $Date: 2011-03-02 20:15:13 $
+*/
+/*---------------------------- Includes ------------------------------------*/
+#include <ctype.h>
+#include "iniparser.h"
+
+/*---------------------------- Defines -------------------------------------*/
+#define ASCIILINESZ (1024)
+#define INI_INVALID_KEY ((char*)-1)
+
+/*---------------------------------------------------------------------------
+ Private to this module
+ ---------------------------------------------------------------------------*/
+/**
+ * This enum stores the status for each parsed line (internal use only).
+ */
+typedef enum _line_status_ {
+ LINE_UNPROCESSED,
+ LINE_ERROR,
+ LINE_EMPTY,
+ LINE_COMMENT,
+ LINE_SECTION,
+ LINE_VALUE
+} line_status ;
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Convert a string to lowercase.
+ @param s String to convert.
+ @return ptr to statically allocated string.
+
+ This function returns a pointer to a statically allocated string
+ containing a lowercased version of the input string. Do not free
+ or modify the returned string! Since the returned string is statically
+ allocated, it will be modified at each function call (not re-entrant).
+ */
+/*--------------------------------------------------------------------------*/
+static char * strlwc(char * s)
+{
+ static char l[ASCIILINESZ+1];
+ int i ;
+
+ if (s==NULL) return NULL ;
+ memset(l, 0, ASCIILINESZ+1);
+ i=0 ;
+ while (s[i] && i<ASCIILINESZ) {
+ l[i] = (char)tolower((int)s[i]);
+ i++ ;
+ }
+ l[ASCIILINESZ]=(char)0;
+ return l ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Remove blanks at the beginning and the end of a string.
+ @param s String to parse.
+ @return ptr to statically allocated string.
+
+ This function returns a pointer to a statically allocated string,
+ which is identical to the input string, except that all blank
+  characters at the end and the beginning of the string have been removed.
+ Do not free or modify the returned string! Since the returned string
+ is statically allocated, it will be modified at each function call
+ (not re-entrant).
+ */
+/*--------------------------------------------------------------------------*/
+static char * strstrip(char * s)
+{
+ static char l[ASCIILINESZ+1];
+ char * last ;
+
+ if (s==NULL) return NULL ;
+
+ while (isspace((int)*s) && *s) s++;
+ memset(l, 0, ASCIILINESZ+1);
+ strcpy(l, s);
+ last = l + strlen(l);
+ while (last > l) {
+ if (!isspace((int)*(last-1)))
+ break ;
+ last -- ;
+ }
+ *last = (char)0;
+ return (char*)l ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get number of sections in a dictionary
+ @param d Dictionary to examine
+ @return int Number of sections found in dictionary
+
+ This function returns the number of sections found in a dictionary.
+ The test to recognize sections is done on the string stored in the
+ dictionary: a section name is given as "section" whereas a key is
+ stored as "section:key", thus the test looks for entries that do not
+ contain a colon.
+
+ This clearly fails in the case a section name contains a colon, but
+ this should simply be avoided.
+
+ This function returns -1 in case of error.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_getnsec(dictionary * d)
+{
+ int i ;
+ int nsec ;
+
+ if (d==NULL) return -1 ;
+ nsec=0 ;
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ if (strchr(d->key[i], ':')==NULL) {
+ nsec ++ ;
+ }
+ }
+ return nsec ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get name for section n in a dictionary.
+ @param d Dictionary to examine
+ @param n Section number (from 0 to nsec-1).
+ @return Pointer to char string
+
+ This function locates the n-th section in a dictionary and returns
+ its name as a pointer to a string statically allocated inside the
+ dictionary. Do not free or modify the returned string!
+
+ This function returns NULL in case of error.
+ */
+/*--------------------------------------------------------------------------*/
+char * iniparser_getsecname(dictionary * d, int n)
+{
+ int i ;
+ int foundsec ;
+
+ if (d==NULL || n<0) return NULL ;
+ foundsec=0 ;
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ if (strchr(d->key[i], ':')==NULL) {
+ foundsec++ ;
+ if (foundsec>n)
+ break ;
+ }
+ }
+ if (foundsec<=n) {
+ return NULL ;
+ }
+ return d->key[i] ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Dump a dictionary to an opened file pointer.
+ @param d Dictionary to dump.
+ @param f Opened file pointer to dump to.
+ @return void
+
+ This function prints out the contents of a dictionary, one element by
+ line, onto the provided file pointer. It is OK to specify @c stderr
+ or @c stdout as output files. This function is meant for debugging
+ purposes mostly.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_dump(dictionary * d, FILE * f)
+{
+ int i ;
+
+ if (d==NULL || f==NULL) return ;
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ if (d->val[i]!=NULL) {
+ fprintf(f, "[%s]=[%s]\n", d->key[i], d->val[i]);
+ } else {
+ fprintf(f, "[%s]=UNDEF\n", d->key[i]);
+ }
+ }
+ return ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Save a dictionary to a loadable ini file
+ @param d Dictionary to dump
+ @param f Opened file pointer to dump to
+ @return void
+
+ This function dumps a given dictionary into a loadable ini file.
+ It is Ok to specify @c stderr or @c stdout as output files.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_dump_ini(dictionary * d, FILE * f)
+{
+ int i, j ;
+ char keym[ASCIILINESZ+1];
+ int nsec ;
+ char * secname ;
+ int seclen ;
+
+ if (d==NULL || f==NULL) return ;
+
+ nsec = iniparser_getnsec(d);
+ if (nsec<1) {
+ /* No section in file: dump all keys as they are */
+ for (i=0 ; i<d->size ; i++) {
+ if (d->key[i]==NULL)
+ continue ;
+ fprintf(f, "%s = %s\n", d->key[i], d->val[i]);
+ }
+ return ;
+ }
+ for (i=0 ; i<nsec ; i++) {
+ secname = iniparser_getsecname(d, i) ;
+ seclen = (int)strlen(secname);
+ fprintf(f, "\n[%s]\n", secname);
+ sprintf(keym, "%s:", secname);
+ for (j=0 ; j<d->size ; j++) {
+ if (d->key[j]==NULL)
+ continue ;
+ if (!strncmp(d->key[j], keym, seclen+1)) {
+ fprintf(f,
+ "%-30s = %s\n",
+ d->key[j]+seclen+1,
+ d->val[j] ? d->val[j] : "");
+ }
+ }
+ }
+ fprintf(f, "\n");
+ return ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param def Default value to return if key not found.
+ @return pointer to statically allocated character string
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the pointer passed as 'def' is returned.
+ The returned char pointer is pointing to a string allocated in
+ the dictionary, do not free or modify it.
+ */
+/*--------------------------------------------------------------------------*/
+char * iniparser_getstring(dictionary * d, char * key, char * def)
+{
+ char * lc_key ;
+ char * sval ;
+
+ if (d==NULL || key==NULL)
+ return def ;
+
+ lc_key = strlwc(key);
+ sval = dictionary_get(d, lc_key, def);
+ return sval ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key, convert to an int
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param notfound Value to return in case of error
+ @return integer
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the notfound value is returned.
+
+ Supported values for integers include the usual C notation
+ so decimal, octal (starting with 0) and hexadecimal (starting with 0x)
+ are supported. Examples:
+
+ "42" -> 42
+ "042" -> 34 (octal -> decimal)
+ "0x42" -> 66 (hexa -> decimal)
+
+ Warning: the conversion may overflow in various ways. Conversion is
+ totally outsourced to strtol(), see the associated man page for overflow
+ handling.
+
+ Credits: Thanks to A. Becker for suggesting strtol()
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_getint(dictionary * d, char * key, int notfound)
+{
+ char * str ;
+
+ str = iniparser_getstring(d, key, INI_INVALID_KEY);
+ if (str==INI_INVALID_KEY) return notfound ;
+ return (int)strtol(str, NULL, 0);
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key, convert to a double
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param notfound Value to return in case of error
+ @return double
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the notfound value is returned.
+ */
+/*--------------------------------------------------------------------------*/
+double iniparser_getdouble(dictionary * d, char * key, double notfound)
+{
+ char * str ;
+
+ str = iniparser_getstring(d, key, INI_INVALID_KEY);
+ if (str==INI_INVALID_KEY) return notfound ;
+ return atof(str);
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key, convert to a boolean
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param notfound Value to return in case of error
+ @return integer
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the notfound value is returned.
+
+ A true boolean is found if one of the following is matched:
+
+ - A string starting with 'y'
+ - A string starting with 'Y'
+ - A string starting with 't'
+ - A string starting with 'T'
+ - A string starting with '1'
+
+ A false boolean is found if one of the following is matched:
+
+ - A string starting with 'n'
+ - A string starting with 'N'
+ - A string starting with 'f'
+ - A string starting with 'F'
+ - A string starting with '0'
+
+  The notfound value, returned if no boolean is identified, does not
+ necessarily have to be 0 or 1.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_getboolean(dictionary * d, char * key, int notfound)
+{
+ char * c ;
+ int ret ;
+
+ c = iniparser_getstring(d, key, INI_INVALID_KEY);
+ if (c==INI_INVALID_KEY) return notfound ;
+ if (c[0]=='y' || c[0]=='Y' || c[0]=='1' || c[0]=='t' || c[0]=='T') {
+ ret = 1 ;
+ } else if (c[0]=='n' || c[0]=='N' || c[0]=='0' || c[0]=='f' || c[0]=='F') {
+ ret = 0 ;
+ } else {
+ ret = notfound ;
+ }
+ return ret;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Finds out if a given entry exists in a dictionary
+ @param ini Dictionary to search
+ @param entry Name of the entry to look for
+ @return integer 1 if entry exists, 0 otherwise
+
+ Finds out if a given entry exists in the dictionary. Since sections
+ are stored as keys with NULL associated values, this is the only way
+ of querying for the presence of sections in a dictionary.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_find_entry(
+ dictionary * ini,
+ char * entry
+)
+{
+ int found=0 ;
+ if (iniparser_getstring(ini, entry, INI_INVALID_KEY)!=INI_INVALID_KEY) {
+ found = 1 ;
+ }
+ return found ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Set an entry in a dictionary.
+ @param ini Dictionary to modify.
+ @param entry Entry to modify (entry name)
+ @param val New value to associate to the entry.
+ @return int 0 if Ok, -1 otherwise.
+
+ If the given entry can be found in the dictionary, it is modified to
+ contain the provided value. If it cannot be found, -1 is returned.
+ It is Ok to set val to NULL.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_set(dictionary * ini, char * entry, char * val)
+{
+ return dictionary_set(ini, strlwc(entry), val) ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Delete an entry in a dictionary
+ @param ini Dictionary to modify
+ @param entry Entry to delete (entry name)
+ @return void
+
+ If the given entry can be found, it is deleted from the dictionary.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_unset(dictionary * ini, char * entry)
+{
+ dictionary_unset(ini, strlwc(entry));
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Load a single line from an INI file
+ @param input_line Input line, may be concatenated multi-line input
+ @param section Output space to store section
+ @param key Output space to store key
+ @param value Output space to store value
+ @return line_status value
+ */
+/*--------------------------------------------------------------------------*/
+static line_status iniparser_line(
+ char * input_line,
+ char * section,
+ char * key,
+ char * value)
+{
+ line_status sta ;
+ char line[ASCIILINESZ+1];
+ int len ;
+
+ strcpy(line, strstrip(input_line));
+ len = (int)strlen(line);
+
+ sta = LINE_UNPROCESSED ;
+ if (len<1) {
+ /* Empty line */
+ sta = LINE_EMPTY ;
+ } else if (line[0]=='#' || line[0]==';') {
+ /* Comment line */
+ sta = LINE_COMMENT ;
+ } else if (line[0]=='[' && line[len-1]==']') {
+ /* Section name */
+ sscanf(line, "[%[^]]", section);
+ strcpy(section, strstrip(section));
+ strcpy(section, strlwc(section));
+ sta = LINE_SECTION ;
+ } else if (sscanf (line, "%[^=] = \"%[^\"]\"", key, value) == 2
+ || sscanf (line, "%[^=] = '%[^\']'", key, value) == 2
+ || sscanf (line, "%[^=] = %[^;#]", key, value) == 2) {
+ /* Usual key=value, with or without comments */
+ strcpy(key, strstrip(key));
+ strcpy(key, strlwc(key));
+ strcpy(value, strstrip(value));
+ /*
+ * sscanf cannot handle '' or "" as empty values
+ * this is done here
+ */
+ if (!strcmp(value, "\"\"") || (!strcmp(value, "''"))) {
+ value[0]=0 ;
+ }
+ sta = LINE_VALUE ;
+ } else if (sscanf(line, "%[^=] = %[;#]", key, value)==2
+ || sscanf(line, "%[^=] %[=]", key, value) == 2) {
+ /*
+ * Special cases:
+ * key=
+ * key=;
+ * key=#
+ */
+ strcpy(key, strstrip(key));
+ strcpy(key, strlwc(key));
+ value[0]=0 ;
+ sta = LINE_VALUE ;
+ } else {
+ /* Generate syntax error */
+ sta = LINE_ERROR ;
+ }
+ return sta ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Parse an ini file and return an allocated dictionary object
+ @param ininame Name of the ini file to read.
+ @return Pointer to newly allocated dictionary
+
+ This is the parser for ini files. This function is called, providing
+ the name of the file to be read. It returns a dictionary object that
+ should not be accessed directly, but through accessor functions
+ instead.
+
+ The returned dictionary must be freed using iniparser_freedict().
+ */
+/*--------------------------------------------------------------------------*/
+dictionary * iniparser_load(char * ininame)
+{
+ FILE * in ;
+
+ char line [ASCIILINESZ+1] ;
+ char section [ASCIILINESZ+1] ;
+ char key [ASCIILINESZ+1] ;
+ char tmp [ASCIILINESZ+1] ;
+ char val [ASCIILINESZ+1] ;
+
+ int last=0 ;
+ int len ;
+ int lineno=0 ;
+ int errs=0;
+
+ dictionary * dict ;
+
+ if ((in=fopen(ininame, "r"))==NULL) {
+ fprintf(stderr, "iniparser: cannot open %s\n", ininame);
+ return NULL ;
+ }
+
+ dict = dictionary_new(0) ;
+ if (!dict) {
+ fclose(in);
+ return NULL ;
+ }
+
+ memset(line, 0, ASCIILINESZ);
+ memset(section, 0, ASCIILINESZ);
+ memset(key, 0, ASCIILINESZ);
+ memset(val, 0, ASCIILINESZ);
+ last=0 ;
+
+ while (fgets(line+last, ASCIILINESZ-last, in)!=NULL) {
+ lineno++ ;
+ len = (int)strlen(line)-1;
+ if (len==0)
+ continue;
+ /* Safety check against buffer overflows */
+ if (line[len]!='\n') {
+ fprintf(stderr,
+ "iniparser: input line too long in %s (%d)\n",
+ ininame,
+ lineno);
+ dictionary_del(dict);
+ fclose(in);
+ return NULL ;
+ }
+ /* Get rid of \n and spaces at end of line */
+ while ((len>=0) &&
+ ((line[len]=='\n') || (isspace(line[len])))) {
+ line[len]=0 ;
+ len-- ;
+ }
+ /* Detect multi-line */
+ if (line[len]=='\\') {
+ /* Multi-line value */
+ last=len ;
+ continue ;
+ } else {
+ last=0 ;
+ }
+ switch (iniparser_line(line, section, key, val)) {
+ case LINE_EMPTY:
+ case LINE_COMMENT:
+ break ;
+
+ case LINE_SECTION:
+ errs = dictionary_set(dict, section, NULL);
+ break ;
+
+ case LINE_VALUE:
+ sprintf(tmp, "%s:%s", section, key);
+ errs = dictionary_set(dict, tmp, val) ;
+ break ;
+
+ case LINE_ERROR:
+ fprintf(stderr, "iniparser: syntax error in %s (%d):\n",
+ ininame,
+ lineno);
+ fprintf(stderr, "-> %s\n", line);
+ errs++ ;
+ break;
+
+ default:
+ break ;
+ }
+ memset(line, 0, ASCIILINESZ);
+ last=0;
+ if (errs<0) {
+ fprintf(stderr, "iniparser: memory allocation failure\n");
+ break ;
+ }
+ }
+ if (errs) {
+ dictionary_del(dict);
+ dict = NULL ;
+ }
+ fclose(in);
+ return dict ;
+}
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Free all memory associated to an ini dictionary
+ @param d Dictionary to free
+ @return void
+
+ Free all memory associated to an ini dictionary.
+ It is mandatory to call this function before the dictionary object
+ gets out of the current context.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_freedict(dictionary * d)
+{
+ dictionary_del(d);
+}
+
+/* vim: set ts=4 et sw=4 tw=75 */
diff --git a/testing/mozbase/mozprocess/tests/iniparser/iniparser.h b/testing/mozbase/mozprocess/tests/iniparser/iniparser.h
new file mode 100644
index 000000000..e3468b2c9
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/iniparser.h
@@ -0,0 +1,273 @@
+
+/*-------------------------------------------------------------------------*/
+/**
+ @file iniparser.h
+ @author N. Devillard
+ @date Sep 2007
+ @version 3.0
+ @brief Parser for ini files.
+*/
+/*--------------------------------------------------------------------------*/
+
+/*
+ $Id: iniparser.h,v 1.26 2011-03-02 20:15:13 ndevilla Exp $
+ $Revision: 1.26 $
+*/
+
+#ifndef _INIPARSER_H_
+#define _INIPARSER_H_
+
+/*---------------------------------------------------------------------------
+ Includes
+ ---------------------------------------------------------------------------*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+/*
+ * The following #include is necessary on many Unixes but not Linux.
+ * It is not needed for Windows platforms.
+ * Uncomment it if needed.
+ */
+/* #include <unistd.h> */
+
+#include "dictionary.h"
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get number of sections in a dictionary
+ @param d Dictionary to examine
+ @return int Number of sections found in dictionary
+
+ This function returns the number of sections found in a dictionary.
+ The test to recognize sections is done on the string stored in the
+ dictionary: a section name is given as "section" whereas a key is
+ stored as "section:key", thus the test looks for entries that do not
+ contain a colon.
+
+ This clearly fails in the case a section name contains a colon, but
+ this should simply be avoided.
+
+ This function returns -1 in case of error.
+ */
+/*--------------------------------------------------------------------------*/
+
+int iniparser_getnsec(dictionary * d);
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get name for section n in a dictionary.
+ @param d Dictionary to examine
+ @param n Section number (from 0 to nsec-1).
+ @return Pointer to char string
+
+ This function locates the n-th section in a dictionary and returns
+ its name as a pointer to a string statically allocated inside the
+ dictionary. Do not free or modify the returned string!
+
+ This function returns NULL in case of error.
+ */
+/*--------------------------------------------------------------------------*/
+
+char * iniparser_getsecname(dictionary * d, int n);
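+
+/*
+ * Sketch of iterating over every section by combining iniparser_getnsec()
+ * and iniparser_getsecname() as declared above:
+ *
+ *     int i, n = iniparser_getnsec(ini);
+ *     for (i = 0; i < n; i++) {
+ *         printf("%s\n", iniparser_getsecname(ini, i));
+ *     }
+ */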
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Save a dictionary to a loadable ini file
+ @param d Dictionary to dump
+ @param f Opened file pointer to dump to
+ @return void
+
+ This function dumps a given dictionary into a loadable ini file.
+ It is Ok to specify @c stderr or @c stdout as output files.
+ */
+/*--------------------------------------------------------------------------*/
+
+void iniparser_dump_ini(dictionary * d, FILE * f);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Dump a dictionary to an opened file pointer.
+ @param d Dictionary to dump.
+ @param f Opened file pointer to dump to.
+ @return void
+
+ This function prints out the contents of a dictionary, one element by
+ line, onto the provided file pointer. It is OK to specify @c stderr
+ or @c stdout as output files. This function is meant for debugging
+ purposes mostly.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_dump(dictionary * d, FILE * f);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param def Default value to return if key not found.
+ @return pointer to statically allocated character string
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the pointer passed as 'def' is returned.
+ The returned char pointer is pointing to a string allocated in
+ the dictionary, do not free or modify it.
+ */
+/*--------------------------------------------------------------------------*/
+char * iniparser_getstring(dictionary * d, char * key, char * def);
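+
+/*
+ * Usage sketch: the key below mirrors the "main:children" lookup that
+ * proclaunch.c performs against a loaded test config.
+ *
+ *     char * children = iniparser_getstring(ini, "main:children", NULL);
+ */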
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key, convert to an int
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param notfound Value to return in case of error
+ @return integer
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the notfound value is returned.
+
+ Supported values for integers include the usual C notation
+ so decimal, octal (starting with 0) and hexadecimal (starting with 0x)
+ are supported. Examples:
+
+ - "42" -> 42
+ - "042" -> 34 (octal -> decimal)
+ - "0x42" -> 66 (hexa -> decimal)
+
+ Warning: the conversion may overflow in various ways. Conversion is
+ totally outsourced to strtol(), see the associated man page for overflow
+ handling.
+
+ Credits: Thanks to A. Becker for suggesting strtol()
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_getint(dictionary * d, char * key, int notfound);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key, convert to a double
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param notfound Value to return in case of error
+ @return double
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the notfound value is returned.
+ */
+/*--------------------------------------------------------------------------*/
+double iniparser_getdouble(dictionary * d, char * key, double notfound);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Get the string associated to a key, convert to a boolean
+ @param d Dictionary to search
+ @param key Key string to look for
+ @param notfound Value to return in case of error
+ @return integer
+
+ This function queries a dictionary for a key. A key as read from an
+ ini file is given as "section:key". If the key cannot be found,
+ the notfound value is returned.
+
+ A true boolean is found if one of the following is matched:
+
+ - A string starting with 'y'
+ - A string starting with 'Y'
+ - A string starting with 't'
+ - A string starting with 'T'
+ - A string starting with '1'
+
+ A false boolean is found if one of the following is matched:
+
+ - A string starting with 'n'
+ - A string starting with 'N'
+ - A string starting with 'f'
+ - A string starting with 'F'
+ - A string starting with '0'
+
+  The notfound value returned when no boolean is identified does not
+  necessarily have to be 0 or 1.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_getboolean(dictionary * d, char * key, int notfound);
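+
+/*
+ * Usage sketch ("main:verbose" is an illustrative key, not one the test
+ * configs define); any value starting with y/Y/t/T/1 yields 1:
+ *
+ *     int verbose = iniparser_getboolean(ini, "main:verbose", 0);
+ */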
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Set an entry in a dictionary.
+ @param ini Dictionary to modify.
+ @param entry Entry to modify (entry name)
+ @param val New value to associate to the entry.
+ @return int 0 if Ok, -1 otherwise.
+
+ If the given entry can be found in the dictionary, it is modified to
+ contain the provided value. If it cannot be found, -1 is returned.
+ It is Ok to set val to NULL.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_set(dictionary * ini, char * entry, char * val);
+
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Delete an entry in a dictionary
+ @param ini Dictionary to modify
+ @param entry Entry to delete (entry name)
+ @return void
+
+ If the given entry can be found, it is deleted from the dictionary.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_unset(dictionary * ini, char * entry);
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Finds out if a given entry exists in a dictionary
+ @param ini Dictionary to search
+ @param entry Name of the entry to look for
+ @return integer 1 if entry exists, 0 otherwise
+
+ Finds out if a given entry exists in the dictionary. Since sections
+ are stored as keys with NULL associated values, this is the only way
+ of querying for the presence of sections in a dictionary.
+ */
+/*--------------------------------------------------------------------------*/
+int iniparser_find_entry(dictionary * ini, char * entry) ;
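+
+/*
+ * Usage sketch: because sections are stored with NULL values, this is how a
+ * caller can check that a [main] section exists before reading its keys:
+ *
+ *     if (!iniparser_find_entry(ini, "main")) {
+ *         ...  handle the missing [main] section here
+ *     }
+ */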
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Parse an ini file and return an allocated dictionary object
+ @param ininame Name of the ini file to read.
+ @return Pointer to newly allocated dictionary
+
+ This is the parser for ini files. This function is called, providing
+ the name of the file to be read. It returns a dictionary object that
+ should not be accessed directly, but through accessor functions
+ instead.
+
+ The returned dictionary must be freed using iniparser_freedict().
+ */
+/*--------------------------------------------------------------------------*/
+dictionary * iniparser_load(char * ininame);
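+
+/*
+ * Minimal end-to-end sketch (the file name refers to one of the mozprocess
+ * test configs); this is the pattern proclaunch.c follows:
+ *
+ *     dictionary * ini = iniparser_load("process_normal_finish.ini");
+ *     if (ini != NULL) {
+ *         int maxtime = iniparser_getint(ini, "main:maxtime", 10);
+ *         iniparser_freedict(ini);
+ *     }
+ */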
+
+/*-------------------------------------------------------------------------*/
+/**
+ @brief Free all memory associated to an ini dictionary
+ @param d Dictionary to free
+ @return void
+
+ Free all memory associated to an ini dictionary.
+ It is mandatory to call this function before the dictionary object
+ gets out of the current context.
+ */
+/*--------------------------------------------------------------------------*/
+void iniparser_freedict(dictionary * d);
+
+#endif
diff --git a/testing/mozbase/mozprocess/tests/iniparser/platform.mk b/testing/mozbase/mozprocess/tests/iniparser/platform.mk
new file mode 100644
index 000000000..bff0296fe
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/iniparser/platform.mk
@@ -0,0 +1,8 @@
+# System platform
+
+# determine if windows
+WIN32 := 0
+UNAME := $(shell uname -s)
+ifneq (,$(findstring MINGW32_NT,$(UNAME)))
+WIN32 = 1
+endif
diff --git a/testing/mozbase/mozprocess/tests/manifest.ini b/testing/mozbase/mozprocess/tests/manifest.ini
new file mode 100644
index 000000000..d869952e3
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/manifest.ini
@@ -0,0 +1,18 @@
+# does not currently work on windows
+# see https://bugzilla.mozilla.org/show_bug.cgi?id=790765#c51
+
+[DEFAULT]
+# bug https://bugzilla.mozilla.org/show_bug.cgi?id=778267#c26
+skip-if = (os == "win")
+
+[test_mozprocess.py]
+disabled = bug 877864
+[test_mozprocess_kill.py]
+[test_mozprocess_kill_broad_wait.py]
+disabled = bug 921632
+[test_mozprocess_misc.py]
+[test_mozprocess_poll.py]
+[test_mozprocess_wait.py]
+[test_mozprocess_output.py]
+[test_mozprocess_params.py]
+[test_process_reader.py]
diff --git a/testing/mozbase/mozprocess/tests/proccountfive.py b/testing/mozbase/mozprocess/tests/proccountfive.py
new file mode 100644
index 000000000..5ec74b32a
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/proccountfive.py
@@ -0,0 +1,2 @@
+for i in range(0, 5):
+ print i
diff --git a/testing/mozbase/mozprocess/tests/process_normal_broad_python.ini b/testing/mozbase/mozprocess/tests/process_normal_broad_python.ini
new file mode 100644
index 000000000..28109cb31
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_normal_broad_python.ini
@@ -0,0 +1,30 @@
+; Generate a Broad Process Tree
+; This generates a Tree of the form:
+;
+; main
+; \_ c1
+; | \_ c2
+; | \_ c2
+; | \_ c2
+; | \_ c2
+; | \_ c2
+; |
+; \_ c1
+; | \_ c2
+; | \_ c2
+; | \_ c2
+; | \_ c2
+; | \_ c2
+; |
+; \_ ... 23 more times
+
+[main]
+children=25*c1
+maxtime=10
+
+[c1]
+children=5*c2
+maxtime=10
+
+[c2]
+maxtime=5
diff --git a/testing/mozbase/mozprocess/tests/process_normal_deep_python.ini b/testing/mozbase/mozprocess/tests/process_normal_deep_python.ini
new file mode 100644
index 000000000..ef9809f6a
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_normal_deep_python.ini
@@ -0,0 +1,65 @@
+; Deep Process Tree
+; Should generate a process tree of the form:
+;
+; main
+; \_ c2
+; | \_ c5
+; | | \_ c6
+; | | \_ c7
+; | | \_ c8
+; | | \_ c1
+; | | \_ c4
+; | \_ c5
+; | \_ c6
+; | \_ c7
+; | \_ c8
+; | \_ c1
+; | \_ c4
+; \_ c2
+; | \_ c5
+; | | \_ c6
+; | | \_ c7
+; | | \_ c8
+; | | \_ c1
+; | | \_ c4
+; | \_ c5
+; | \_ c6
+; | \_ c7
+; | \_ c8
+; | \_ c1
+; | \_ c4
+; \_ c1
+; | \_ c4
+; \_ c1
+; \_ c4
+
+[main]
+children=2*c1, 2*c2
+maxtime=20
+
+[c1]
+children=c4
+maxtime=20
+
+[c2]
+children=2*c5
+maxtime=20
+
+[c4]
+maxtime=20
+
+[c5]
+children=c6
+maxtime=20
+
+[c6]
+children=c7
+maxtime=20
+
+[c7]
+children=c8
+maxtime=20
+
+[c8]
+children=c1
+maxtime=20
diff --git a/testing/mozbase/mozprocess/tests/process_normal_finish.ini b/testing/mozbase/mozprocess/tests/process_normal_finish.ini
new file mode 100644
index 000000000..c4468de49
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_normal_finish.ini
@@ -0,0 +1,11 @@
+[main]
+children=c1,c2
+maxtime=60
+
+[c1]
+children=2
+maxtime=60
+
+[c2]
+children=0
+maxtime=30
diff --git a/testing/mozbase/mozprocess/tests/process_normal_finish_no_process_group.ini b/testing/mozbase/mozprocess/tests/process_normal_finish_no_process_group.ini
new file mode 100644
index 000000000..2b0f1f9a4
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_normal_finish_no_process_group.ini
@@ -0,0 +1,2 @@
+[main]
+maxtime=10
diff --git a/testing/mozbase/mozprocess/tests/process_normal_finish_python.ini b/testing/mozbase/mozprocess/tests/process_normal_finish_python.ini
new file mode 100644
index 000000000..4519c7083
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_normal_finish_python.ini
@@ -0,0 +1,17 @@
+; Generates a normal process tree
+; Tree is of the form:
+; main
+; \_ c1
+; \_ c2
+
+[main]
+children=c1,c2
+maxtime=10
+
+[c1]
+children=c2
+maxtime=5
+
+[c2]
+maxtime=5
+
diff --git a/testing/mozbase/mozprocess/tests/process_waittimeout.ini b/testing/mozbase/mozprocess/tests/process_waittimeout.ini
new file mode 100644
index 000000000..77cbf2e39
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_waittimeout.ini
@@ -0,0 +1,11 @@
+[main]
+children=c1,c2
+maxtime=300
+
+[c1]
+children=2
+maxtime=300
+
+[c2]
+children=3
+maxtime=300
diff --git a/testing/mozbase/mozprocess/tests/process_waittimeout_10s.ini b/testing/mozbase/mozprocess/tests/process_waittimeout_10s.ini
new file mode 100644
index 000000000..59d2d76ff
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_waittimeout_10s.ini
@@ -0,0 +1,8 @@
+[main]
+children=c1
+maxtime=10
+
+[c1]
+children=2
+maxtime=5
+
diff --git a/testing/mozbase/mozprocess/tests/process_waittimeout_10s_python.ini b/testing/mozbase/mozprocess/tests/process_waittimeout_10s_python.ini
new file mode 100644
index 000000000..abf8d6a4e
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_waittimeout_10s_python.ini
@@ -0,0 +1,16 @@
+; Generate a normal process tree
+; Tree is of the form:
+; main
+; \_ c1
+; \_ c2
+
+[main]
+children=c1
+maxtime=10
+
+[c1]
+children=2*c2
+maxtime=5
+
+[c2]
+maxtime=5
diff --git a/testing/mozbase/mozprocess/tests/process_waittimeout_python.ini b/testing/mozbase/mozprocess/tests/process_waittimeout_python.ini
new file mode 100644
index 000000000..5800267d1
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/process_waittimeout_python.ini
@@ -0,0 +1,16 @@
+; Generates a normal process tree
+; Tree is of the form:
+; main
+; \_ c1
+; \_ c2
+
+[main]
+children=2*c1
+maxtime=300
+
+[c1]
+children=2*c2
+maxtime=300
+
+[c2]
+maxtime=300
diff --git a/testing/mozbase/mozprocess/tests/proclaunch.c b/testing/mozbase/mozprocess/tests/proclaunch.c
new file mode 100644
index 000000000..05c564c79
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/proclaunch.c
@@ -0,0 +1,156 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include "iniparser.h"
+
+#ifdef _WIN32
+#include <windows.h>
+#include <tchar.h>
+
+extern int iniparser_getint(dictionary *d, char *key, int notfound);
+extern char *iniparser_getstring(dictionary *d, char *key, char *def);
+
+// This is the windows launcher function
+int launchWindows(int children, int maxtime) {
+ _TCHAR cmdline[50];
+ STARTUPINFO startup;
+ PROCESS_INFORMATION procinfo;
+ BOOL rv = 0;
+
+ _stprintf(cmdline, _T("proclaunch.exe %d %d"), children, maxtime);
+ ZeroMemory(&startup, sizeof(STARTUPINFO));
+ startup.cb = sizeof(STARTUPINFO);
+
+ ZeroMemory(&procinfo, sizeof(PROCESS_INFORMATION));
+
+ printf("Launching process!\n");
+ rv = CreateProcess(NULL,
+ cmdline,
+ NULL,
+ NULL,
+ FALSE,
+ 0,
+ NULL,
+ NULL,
+ &startup,
+ &procinfo);
+
+ if (!rv) {
+ DWORD dw = GetLastError();
+ printf("error: %d\n", dw);
+ }
+ CloseHandle(procinfo.hProcess);
+ CloseHandle(procinfo.hThread);
+ return 0;
+}
+#endif
+
+int main(int argc, char **argv) {
+ int children = 0;
+ int maxtime = 0;
+ int passedtime = 0;
+ dictionary *dict = NULL;
+
+ // Command line handling
+ if (argc == 1 || (0 == strcmp(argv[1], "-h")) || (0 == strcmp(argv[1], "--help"))) {
+ printf("ProcLauncher takes an ini file. Specify the ini file as the only\n");
+ printf("parameter of the command line:\n");
+ printf("proclauncher my.ini\n\n");
+ printf("The ini file has the form:\n");
+ printf("[main]\n");
+ printf("children=child1,child2 ; These comma separated values are sections\n");
+ printf("maxtime=60 ; Max time this process lives\n");
+ printf("[child1] ; Here is a child section\n");
+ printf("children=3 ; You can have grandchildren: this spawns 3 of them for child1\n");
+ printf("maxtime=30 ; Max time, note it's in seconds. If this time\n");
+ printf(" ; is > main:maxtime then the child process will be\n");
+ printf(" ; killed when the parent exits. Also, grandchildren\n");
+ printf("[child2] ; inherit this maxtime and can't change it.\n");
+ printf("maxtime=25 ; You can call these sections whatever you want\n");
+ printf("children=0 ; as long as you reference them in a children attribute\n");
+ printf("....\n");
+ return 0;
+ } else if (argc == 2) {
+ // This is ini file mode:
+ // proclauncher <inifile>
+ dict = iniparser_load(argv[1]);
+
+ } else if (argc == 3) {
+ // Then we've been called in child process launching mode:
+ // proclauncher <children> <maxtime>
+ children = atoi(argv[1]);
+ maxtime = atoi(argv[2]);
+ }
+
+ if (dict) {
+ /* Dict operation */
+ char *childlist = iniparser_getstring(dict, "main:children", NULL);
+        maxtime = iniparser_getint(dict, (char*)"main:maxtime", 10);
+ if (childlist) {
+ int c = 0, m = 10;
+ char childkey[50], maxkey[50];
+ char cmd[25];
+ char *token = strtok(childlist, ",");
+
+ while (token) {
+ // Reset defaults
+ memset(childkey, 0, 50);
+ memset(maxkey, 0, 50);
+ memset(cmd, 0, 25);
+ c = 0;
+ m = 10;
+
+ sprintf(childkey, "%s:children", token);
+ sprintf(maxkey, "%s:maxtime", token);
+ c = iniparser_getint(dict, childkey, 0);
+ m = iniparser_getint(dict, maxkey, 10);
+
+ // Launch the child process
+ #ifdef _WIN32
+ launchWindows(c, m);
+ #else
+ sprintf(cmd, "./proclaunch %d %d &", c, m);
+ system(cmd);
+ #endif
+
+ // Get the next child entry
+ token = strtok(NULL, ",");
+ }
+ }
+ iniparser_freedict(dict);
+ } else {
+ // Child Process operation - put on your recursive thinking cap
+ char cmd[25];
+        // This launches the grandchildren; there are no great-grandchildren, so we
+        // pass in 0 for the number of children to spawn.
+ #ifdef _WIN32
+ while(children > 0) {
+ launchWindows(0, maxtime);
+ children--;
+ }
+ #else
+ sprintf(cmd, "./proclaunch %d %d &", 0, maxtime);
+ printf("Launching child process: %s\n", cmd);
+ while (children > 0) {
+ system(cmd);
+ children--;
+ }
+ #endif
+ }
+
+    /* Now we have launched all the children. Let's wait for max time before
+       returning. This does pseudo busy waiting just to appear active. */
+ while (passedtime < maxtime) {
+#ifdef _WIN32
+ Sleep(1000);
+#else
+ sleep(1);
+#endif
+ passedtime++;
+ }
+ exit(0);
+ return 0;
+}
diff --git a/testing/mozbase/mozprocess/tests/proclaunch.py b/testing/mozbase/mozprocess/tests/proclaunch.py
new file mode 100644
index 000000000..ad06a23a1
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/proclaunch.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+
+import argparse
+import collections
+import ConfigParser
+import multiprocessing
+import time
+
+ProcessNode = collections.namedtuple('ProcessNode', ['maxtime', 'children'])
+
+
+class ProcessLauncher(object):
+
+    """ Create and launch process trees specified by a '.ini' file
+
+ Typical .ini file accepted by this class :
+
+ [main]
+ children=c1, 1*c2, 4*c3
+ maxtime=10
+
+ [c1]
+ children= 2*c2, c3
+ maxtime=20
+
+ [c2]
+ children=3*c3
+ maxtime=5
+
+ [c3]
+ maxtime=3
+
+ This generates a process tree of the form:
+ [main]
+ |---[c1]
+ | |---[c2]
+ | | |---[c3]
+ | | |---[c3]
+ | | |---[c3]
+ | |
+ | |---[c2]
+ | | |---[c3]
+ | | |---[c3]
+ | | |---[c3]
+ | |
+ | |---[c3]
+ |
+ |---[c2]
+ | |---[c3]
+ | |---[c3]
+ | |---[c3]
+ |
+ |---[c3]
+ |---[c3]
+ |---[c3]
+
+    Caveat: The section names cannot contain a '*' (asterisk) or a ',' (comma)
+    character, as these are used as delimiters for parsing.
+ """
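+
+    # Usage sketch (the manifest below is one of the test configs that lives
+    # next to this file):
+    #
+    #     launcher = ProcessLauncher('process_normal_finish_python.ini', verbose=True)
+    #     launcher.run()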
+
+ # Unit time for processes in seconds
+ UNIT_TIME = 1
+
+ def __init__(self, manifest, verbose=False):
+ """
+ Parses the manifest and stores the information about the process tree
+ in a format usable by the class.
+
+        Raises IOError if:
+            - The path does not exist
+            - The file cannot be read
+        Raises ConfigParser.*Error if:
+            - The file does not contain section headers
+            - The file cannot be parsed because of an incorrect specification
+
+ :param manifest: Path to the manifest file that contains the
+ configuration for the process tree to be launched
+        :param verbose: Print the process start and end information.
+                        Generates a lot of output. Disabled by default.
+ """
+
+ self.verbose = verbose
+
+        # Children is a dictionary used to store information from the
+        # configuration file in a more usable format.
+        # Key: string containing the name of the child process
+        # Value: a named tuple of the form (maxtime, list of child processes of Key),
+        # where each child process is a list of the form [count to run, name of child]
+ self.children = {}
+
+ cfgparser = ConfigParser.ConfigParser()
+
+ if not cfgparser.read(manifest):
+            raise IOError('The manifest %s could not be found/opened' % manifest)
+
+ sections = cfgparser.sections()
+ for section in sections:
+ # Maxtime is a mandatory option
+ # ConfigParser.NoOptionError is raised if maxtime does not exist
+ if '*' in section or ',' in section:
+ raise ConfigParser.ParsingError(
+ "%s is not a valid section name. "
+ "Section names cannot contain a '*' or ','." % section)
+ m_time = cfgparser.get(section, 'maxtime')
+ try:
+ m_time = int(m_time)
+ except ValueError:
+ raise ValueError('Expected maxtime to be an integer, specified %s' % m_time)
+
+            # A missing children option implies there are no further children;
+            # an empty children option is treated the same way.
+ try:
+ c = cfgparser.get(section, 'children')
+ if not c:
+ # If children is an empty field, assume no children
+ children = None
+
+ else:
+                    # Tokenize the children field, ignoring empty strings
+ children = [[y.strip() for y in x.strip().split('*', 1)]
+ for x in c.split(',') if x]
+ try:
+ for i, child in enumerate(children):
+                        # No multiplicative factor in front of a process implies 1
+ if len(child) == 1:
+ children[i] = [1, child[0]]
+ else:
+ children[i][0] = int(child[0])
+
+ if children[i][1] not in sections:
+ raise ConfigParser.ParsingError(
+ 'No section corresponding to child %s' % child[1])
+ except ValueError:
+ raise ValueError(
+ 'Expected process count to be an integer, specified %s' % child[0])
+
+ except ConfigParser.NoOptionError:
+ children = None
+ pn = ProcessNode(maxtime=m_time,
+ children=children)
+ self.children[section] = pn
+
+ def run(self):
+ """
+ This function launches the process tree.
+ """
+ self._run('main', 0)
+
+ def _run(self, proc_name, level):
+ """
+ Runs the process specified by the section-name `proc_name` in the manifest file.
+ Then makes calls to launch the child processes of `proc_name`
+
+        :param proc_name: Name of the section in the manifest that describes this process.
+ :param level: Depth of the current process in the tree.
+ """
+ if proc_name not in self.children.keys():
+ raise IOError("%s is not a valid process" % proc_name)
+
+ maxtime = self.children[proc_name].maxtime
+ if self.verbose:
+ print "%sLaunching %s for %d*%d seconds" % (" " * level,
+ proc_name,
+ maxtime,
+ self.UNIT_TIME)
+
+ while self.children[proc_name].children:
+ child = self.children[proc_name].children.pop()
+
+ count, child_proc = child
+ for i in range(count):
+                p = multiprocessing.Process(target=self._run, args=(child_proc, level + 1))
+ p.start()
+
+ self._launch(maxtime)
+ if self.verbose:
+ print "%sFinished %s" % (" " * level, proc_name)
+
+ def _launch(self, running_time):
+ """
+        Simulate a running process by idling for the time specified by
+        `running_time`.
+
+ :param running_time: Running time of the process in seconds.
+ """
+ elapsed_time = 0
+
+ while elapsed_time < running_time:
+ time.sleep(self.UNIT_TIME)
+ elapsed_time += self.UNIT_TIME
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("manifest", help="Specify the configuration .ini file")
+ args = parser.parse_args()
+
+ proclaunch = ProcessLauncher(args.manifest)
+ proclaunch.run()
diff --git a/testing/mozbase/mozprocess/tests/procnonewline.py b/testing/mozbase/mozprocess/tests/procnonewline.py
new file mode 100644
index 000000000..428a02bcb
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/procnonewline.py
@@ -0,0 +1,3 @@
+import sys
+print "this is a newline"
+sys.stdout.write("this has NO newline")
diff --git a/testing/mozbase/mozprocess/tests/proctest.py b/testing/mozbase/mozprocess/tests/proctest.py
new file mode 100644
index 000000000..62ccf940c
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/proctest.py
@@ -0,0 +1,52 @@
+import os
+import sys
+import unittest
+import psutil
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ cls.proclaunch = os.path.join(here, "proclaunch.py")
+ cls.python = sys.executable
+
+ def determine_status(self, proc, isalive=False, expectedfail=()):
+ """
+        Used to determine whether the test scenario has failed.
+ Parameters:
+ proc -- the processhandler instance
+ isalive -- Use True to indicate we pass if the process exists; however, by default
+ the test will pass if the process does not exist (isalive == False)
+        expectedfail -- Defaults to (), used to indicate a list of fields
+                        that are expected to fail
+ """
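+        # Typical calls, taken from the tests in this directory, look like:
+        #     self.determine_status(p)
+        #     self.determine_status(p, True)
+        #     self.determine_status(p, False, ['returncode', 'didtimeout'])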
+ returncode = proc.proc.returncode
+ didtimeout = proc.didTimeout
+ detected = psutil.pid_exists(proc.pid)
+ output = ''
+ # ProcessHandler has output when store_output is set to True in the constructor
+ # (this is the default)
+ if getattr(proc, 'output'):
+ output = proc.output
+
+ if 'returncode' in expectedfail:
+ self.assertTrue(returncode, "Detected an unexpected return code of: %s" % returncode)
+ elif isalive:
+            self.assertEqual(returncode, None, "Detected a non-None return code of: %s" % returncode)
+ else:
+            self.assertNotEqual(returncode, None, "Detected an unexpected return code of None")
+
+ if 'didtimeout' in expectedfail:
+ self.assertTrue(didtimeout, "Detected that process didn't time out")
+ else:
+ self.assertTrue(not didtimeout, "Detected that process timed out")
+
+ if isalive:
+ self.assertTrue(detected, "Detected process is not running, "
+ "process output: %s" % output)
+ else:
+ self.assertTrue(not detected, "Detected process is still running, "
+ "process output: %s" % output)
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess.py b/testing/mozbase/mozprocess/tests/test_mozprocess.py
new file mode 100644
index 000000000..bf8ba194c
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import subprocess
+import sys
+import unittest
+import proctest
+from mozprocess import processhandler
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+def make_proclaunch(aDir):
+ """
+ Makes the proclaunch executable.
+ Params:
+ aDir - the directory in which to issue the make commands
+ Returns:
+ the path to the proclaunch executable that is generated
+ """
+
+ if sys.platform == "win32":
+ exepath = os.path.join(aDir, "proclaunch.exe")
+ else:
+ exepath = os.path.join(aDir, "proclaunch")
+
+ # remove the launcher, if it already exists
+ # otherwise, if the make fails you may not notice
+ if os.path.exists(exepath):
+ os.remove(exepath)
+
+    # Ideally make should take care of both calls through recursion, but since it
+    # doesn't (on Windows at least; a bug to file?), let's just call both targets
+    # explicitly.
+ for command in [["make", "-C", "iniparser"],
+ ["make"]]:
+ process = subprocess.Popen(command, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, cwd=aDir)
+ stdout, stderr = process.communicate()
+ if process.returncode:
+            # Something bad happened; print all the output
+ print "%s: exit %d" % (command, process.returncode)
+ print "stdout:\n%s" % stdout
+ print "stderr:\n%s" % stderr
+ raise subprocess.CalledProcessError(process.returncode, command, stdout)
+
+ # ensure the launcher now exists
+ if not os.path.exists(exepath):
+ raise AssertionError("proclaunch executable '%s' "
+ "does not exist (sys.platform=%s)" % (exepath, sys.platform))
+ return exepath
+
+
+class ProcTest(proctest.ProcTest):
+
+ # whether to remove created files on exit
+ cleanup = os.environ.get('CLEANUP', 'true').lower() in ('1', 'true')
+
+ @classmethod
+ def setUpClass(cls):
+ cls.proclaunch = make_proclaunch(here)
+
+ @classmethod
+ def tearDownClass(cls):
+ del cls.proclaunch
+ if not cls.cleanup:
+ return
+ files = [('proclaunch',),
+ ('proclaunch.exe',),
+ ('iniparser', 'dictionary.o'),
+ ('iniparser', 'iniparser.lib'),
+ ('iniparser', 'iniparser.o'),
+ ('iniparser', 'libiniparser.a'),
+ ('iniparser', 'libiniparser.so.0'),
+ ]
+ files = [os.path.join(here, *path) for path in files]
+ errors = []
+ for path in files:
+ if os.path.exists(path):
+ try:
+ os.remove(path)
+ except OSError as e:
+ errors.append(str(e))
+ if errors:
+ raise OSError("Error(s) encountered tearing down "
+ "%s.%s:\n%s" % (cls.__module__, cls.__name__, '\n'.join(errors)))
+
+ def test_process_normal_finish(self):
+ """Process is started, runs to completion while we wait for it"""
+
+ p = processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
+ cwd=here)
+ p.run()
+ p.wait()
+
+ self.determine_status(p)
+
+ def test_commandline_no_args(self):
+ """Command line is reported correctly when no arguments are specified"""
+ p = processhandler.ProcessHandler(self.proclaunch, cwd=here)
+ self.assertEqual(p.commandline, self.proclaunch)
+
+ def test_commandline_overspecified(self):
+ """Command line raises an exception when the arguments are specified ambiguously"""
+ err = None
+ try:
+ processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
+ args=["1", "2", "3"],
+ cwd=here)
+ except TypeError, e:
+ err = e
+
+ self.assertTrue(err)
+
+ def test_commandline_from_list(self):
+ """Command line is reported correctly when command and arguments are specified in a list"""
+ p = processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
+ cwd=here)
+ self.assertEqual(p.commandline, self.proclaunch + ' process_normal_finish.ini')
+
+ def test_commandline_over_specified(self):
+ """Command line raises an exception when the arguments are specified ambiguously"""
+ err = None
+ try:
+ processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
+ args=["1", "2", "3"],
+ cwd=here)
+ except TypeError, e:
+ err = e
+
+ self.assertTrue(err)
+
+ def test_commandline_from_args(self):
+ """Command line is reported correctly when arguments are specified in a dedicated list"""
+ p = processhandler.ProcessHandler(self.proclaunch,
+ args=["1", "2", "3"],
+ cwd=here)
+ self.assertEqual(p.commandline, self.proclaunch + ' 1 2 3')
+
+ def test_process_wait(self):
+ """Process is started runs to completion while we wait indefinitely"""
+
+ p = processhandler.ProcessHandler([self.proclaunch,
+ "process_waittimeout_10s.ini"],
+ cwd=here)
+ p.run()
+ p.wait()
+
+ self.determine_status(p)
+
+ def test_process_timeout(self):
+ """ Process is started, runs but we time out waiting on it
+ to complete
+ """
+ p = processhandler.ProcessHandler([self.proclaunch, "process_waittimeout.ini"],
+ cwd=here)
+ p.run(timeout=10)
+ p.wait()
+
+ self.determine_status(p, False, ['returncode', 'didtimeout'])
+
+ def test_process_timeout_no_kill(self):
+ """ Process is started, runs but we time out waiting on it
+ to complete. Process should not be killed.
+ """
+ p = None
+
+ def timeout_handler():
+ self.assertEqual(p.proc.poll(), None)
+ p.kill()
+ p = processhandler.ProcessHandler([self.proclaunch, "process_waittimeout.ini"],
+ cwd=here,
+ onTimeout=(timeout_handler,),
+ kill_on_timeout=False)
+ p.run(timeout=1)
+ p.wait()
+ self.assertTrue(p.didTimeout)
+
+ self.determine_status(p, False, ['returncode', 'didtimeout'])
+
+ def test_process_waittimeout(self):
+ """
+ Process is started, then wait is called and times out.
+ Process is still running and didn't timeout
+ """
+ p = processhandler.ProcessHandler([self.proclaunch,
+ "process_waittimeout_10s.ini"],
+ cwd=here)
+
+ p.run()
+ p.wait(timeout=5)
+
+ self.determine_status(p, True, ())
+
+ def test_process_waitnotimeout(self):
+ """ Process is started, runs to completion before our wait times out
+ """
+ p = processhandler.ProcessHandler([self.proclaunch,
+ "process_waittimeout_10s.ini"],
+ cwd=here)
+ p.run(timeout=30)
+ p.wait()
+
+ self.determine_status(p)
+
+ def test_process_kill(self):
+ """Process is started, we kill it"""
+
+ p = processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
+ cwd=here)
+ p.run()
+ p.kill()
+
+ self.determine_status(p)
+
+ def test_process_output_twice(self):
+ """
+ Process is started, then processOutput is called a second time explicitly
+ """
+ p = processhandler.ProcessHandler([self.proclaunch,
+ "process_waittimeout_10s.ini"],
+ cwd=here)
+
+ p.run()
+ p.processOutput(timeout=5)
+ p.wait()
+
+ self.determine_status(p, False, ())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_kill.py b/testing/mozbase/mozprocess/tests/test_mozprocess_kill.py
new file mode 100644
index 000000000..36dbc95cc
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_kill.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+
+import os
+import time
+import unittest
+import proctest
+import signal
+from mozprocess import processhandler
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTestKill(proctest.ProcTest):
+    """ Class to test various process tree killing scenarios """
+
+ def test_kill_before_run(self):
+ """Process is not started, and kill() is called"""
+
+ p = processhandler.ProcessHandler([self.python, '-V'])
+ self.assertRaises(RuntimeError, p.kill)
+
+ def test_process_kill(self):
+ """Process is started, we kill it"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ p.run()
+ p.kill()
+
+ self.determine_status(p, expectedfail=('returncode',))
+
+ def test_process_kill_deep(self):
+ """Process is started, we kill it, we use a deep process tree"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_deep_python.ini"],
+ cwd=here)
+ p.run()
+ p.kill()
+
+ self.determine_status(p, expectedfail=('returncode',))
+
+ def test_process_kill_deep_wait(self):
+ """Process is started, we use a deep process tree, we let it spawn
+ for a bit, we kill it"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_deep_python.ini"],
+ cwd=here)
+ p.run()
+ # Let the tree spawn a bit, before attempting to kill
+ time.sleep(3)
+ p.kill()
+
+ self.determine_status(p, expectedfail=('returncode',))
+
+ def test_process_kill_broad(self):
+ """Process is started, we kill it, we use a broad process tree"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_broad_python.ini"],
+ cwd=here)
+ p.run()
+ p.kill()
+
+ self.determine_status(p, expectedfail=('returncode',))
+
+ @unittest.skipUnless(processhandler.isPosix, "posix only")
+ def test_process_kill_with_sigterm(self):
+ script = os.path.join(here, 'infinite_loop.py')
+ p = processhandler.ProcessHandler([self.python, script])
+
+ p.run()
+ p.kill()
+
+ self.assertEquals(p.proc.returncode, -signal.SIGTERM)
+
+ @unittest.skipUnless(processhandler.isPosix, "posix only")
+ def test_process_kill_with_sigint_if_needed(self):
+ script = os.path.join(here, 'infinite_loop.py')
+ p = processhandler.ProcessHandler([self.python, script, 'deadlock'])
+
+ p.run()
+ time.sleep(1)
+ p.kill()
+
+ self.assertEquals(p.proc.returncode, -signal.SIGKILL)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_kill_broad_wait.py b/testing/mozbase/mozprocess/tests/test_mozprocess_kill_broad_wait.py
new file mode 100644
index 000000000..cc8cef978
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_kill_broad_wait.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+import os
+import time
+import unittest
+import proctest
+from mozprocess import processhandler
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTestKill(proctest.ProcTest):
+    """ Class to test various process tree killing scenarios """
+
+ # This test should ideally be a part of test_mozprocess_kill.py
+    # It has been separated for the purpose of temporarily disabling it.
+ # See https://bugzilla.mozilla.org/show_bug.cgi?id=921632
+ def test_process_kill_broad_wait(self):
+ """Process is started, we use a broad process tree, we let it spawn
+ for a bit, we kill it"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_broad_python.ini"],
+ cwd=here)
+ p.run()
+ # Let the tree spawn a bit, before attempting to kill
+ time.sleep(3)
+ p.kill()
+
+ self.determine_status(p, expectedfail=('returncode',))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_misc.py b/testing/mozbase/mozprocess/tests/test_mozprocess_misc.py
new file mode 100644
index 000000000..7a7c690d1
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_misc.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import unittest
+import proctest
+from mozprocess import processhandler
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTestMisc(proctest.ProcTest):
+ """ Class to test misc operations """
+
+ def test_process_output_twice(self):
+ """
+ Process is started, then processOutput is called a second time explicitly
+ """
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_waittimeout_10s_python.ini"],
+ cwd=here)
+
+ p.run()
+ p.processOutput(timeout=5)
+ p.wait()
+
+ self.determine_status(p, False, ())
+
+ def test_unicode_in_environment(self):
+ env = {
+ 'FOOBAR': 'ʘ',
+ }
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here, env=env)
+ # passes if no exceptions are raised
+ p.run()
+ p.wait()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_output.py b/testing/mozbase/mozprocess/tests/test_mozprocess_output.py
new file mode 100644
index 000000000..e9ad26620
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_output.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+import io
+import os
+import unittest
+import proctest
+from mozprocess import processhandler
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTestOutput(proctest.ProcTest):
+ """ Class to test operations related to output handling """
+
+ def test_process_output_nonewline(self):
+ """
+ Process is started, outputs data with no newline
+ """
+ p = processhandler.ProcessHandler([self.python, "procnonewline.py"],
+ cwd=here)
+
+ p.run()
+ p.processOutput(timeout=5)
+ p.wait()
+
+ self.determine_status(p, False, ())
+
+ def test_stream_process_output(self):
+ """
+ Process output stream does not buffer
+ """
+ expected = '\n'.join([str(n) for n in range(0, 10)])
+
+ stream = io.BytesIO()
+ buf = io.BufferedRandom(stream)
+
+ p = processhandler.ProcessHandler([self.python, "proccountfive.py"],
+ cwd=here,
+ stream=buf)
+
+ p.run()
+ p.wait()
+ for i in range(5, 10):
+ stream.write(str(i) + '\n')
+
+ buf.flush()
+ self.assertEquals(stream.getvalue().strip(), expected)
+
+ # make sure mozprocess doesn't close the stream
+ # since mozprocess didn't create it
+ self.assertFalse(buf.closed)
+ buf.close()
+
+ self.determine_status(p, False, ())
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_params.py b/testing/mozbase/mozprocess/tests/test_mozprocess_params.py
new file mode 100644
index 000000000..d4d1f00f3
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_params.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from mozprocess import processhandler
+
+
+class ParamTests(unittest.TestCase):
+
+ def test_process_outputline_handler(self):
+ """Parameter processOutputLine is accepted with a single function"""
+ def output(line):
+ print("output " + str(line))
+ err = None
+ try:
+ processhandler.ProcessHandler(['ls', '-l'], processOutputLine=output)
+ except (TypeError, AttributeError) as e:
+ err = e
+ self.assertFalse(err)
+
+ def test_process_outputline_handler_list(self):
+ """Parameter processOutputLine is accepted with a list of functions"""
+ def output(line):
+ print("output " + str(line))
+ err = None
+ try:
+ processhandler.ProcessHandler(['ls', '-l'], processOutputLine=[output])
+ except (TypeError, AttributeError) as e:
+ err = e
+ self.assertFalse(err)
+
+ def test_process_ontimeout_handler(self):
+ """Parameter onTimeout is accepted with a single function"""
+ def timeout():
+ print("timeout!")
+ err = None
+ try:
+ processhandler.ProcessHandler(['sleep', '2'], onTimeout=timeout)
+ except (TypeError, AttributeError) as e:
+ err = e
+ self.assertFalse(err)
+
+ def test_process_ontimeout_handler_list(self):
+ """Parameter onTimeout is accepted with a list of functions"""
+ def timeout():
+ print("timeout!")
+ err = None
+ try:
+ processhandler.ProcessHandler(['sleep', '2'], onTimeout=[timeout])
+ except (TypeError, AttributeError) as e:
+ err = e
+ self.assertFalse(err)
+
+ def test_process_onfinish_handler(self):
+ """Parameter onFinish is accepted with a single function"""
+ def finish():
+ print("finished!")
+ err = None
+ try:
+ processhandler.ProcessHandler(['sleep', '1'], onFinish=finish)
+ except (TypeError, AttributeError) as e:
+ err = e
+ self.assertFalse(err)
+
+ def test_process_onfinish_handler_list(self):
+ """Parameter onFinish is accepted with a list of functions"""
+ def finish():
+ print("finished!")
+ err = None
+ try:
+ processhandler.ProcessHandler(['sleep', '1'], onFinish=[finish])
+ except (TypeError, AttributeError) as e:
+ err = e
+ self.assertFalse(err)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_poll.py b/testing/mozbase/mozprocess/tests/test_mozprocess_poll.py
new file mode 100644
index 000000000..a1ae070aa
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_poll.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+import os
+import signal
+import unittest
+
+from mozprocess import processhandler
+
+import proctest
+
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTestPoll(proctest.ProcTest):
+ """ Class to test process poll """
+
+ def test_poll_before_run(self):
+ """Process is not started, and poll() is called"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ self.assertRaises(RuntimeError, p.poll)
+
+ def test_poll_while_running(self):
+ """Process is started, and poll() is called"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ p.run()
+ returncode = p.poll()
+
+ self.assertEqual(returncode, None)
+
+ self.determine_status(p, True)
+ p.kill()
+
+ def test_poll_after_kill(self):
+ """Process is killed, and poll() is called"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ p.run()
+ returncode = p.kill()
+
+ # We killed the process, so the returncode should be < 0
+ self.assertLess(returncode, 0)
+ self.assertEqual(returncode, p.poll())
+
+ self.determine_status(p)
+
+ def test_poll_after_kill_no_process_group(self):
+ """Process (no group) is killed, and poll() is called"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_no_process_group.ini"],
+ cwd=here,
+ ignore_children=True
+ )
+ p.run()
+ returncode = p.kill()
+
+ # We killed the process, so the returncode should be < 0
+ self.assertLess(returncode, 0)
+ self.assertEqual(returncode, p.poll())
+
+ self.determine_status(p)
+
+ def test_poll_after_double_kill(self):
+ """Process is killed twice, and poll() is called"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ p.run()
+ p.kill()
+ returncode = p.kill()
+
+ # We killed the process, so the returncode should be < 0
+ self.assertLess(returncode, 0)
+ self.assertEqual(returncode, p.poll())
+
+ self.determine_status(p)
+
+ def test_poll_after_external_kill(self):
+ """Process is killed externally, and poll() is called"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ p.run()
+ os.kill(p.pid, signal.SIGTERM)
+ returncode = p.wait()
+
+ # We killed the process, so the returncode should be < 0
+ self.assertEqual(returncode, -signal.SIGTERM)
+ self.assertEqual(returncode, p.poll())
+
+ self.determine_status(p)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_mozprocess_wait.py b/testing/mozbase/mozprocess/tests/test_mozprocess_wait.py
new file mode 100644
index 000000000..df9e753ee
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_mozprocess_wait.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+
+import os
+import unittest
+import proctest
+import mozinfo
+from mozprocess import processhandler
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class ProcTestWait(proctest.ProcTest):
+ """ Class to test process waits and timeouts """
+
+ def test_normal_finish(self):
+ """Process is started, runs to completion while we wait for it"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_normal_finish_python.ini"],
+ cwd=here)
+ p.run()
+ p.wait()
+
+ self.determine_status(p)
+
+ def test_wait(self):
+ """Process is started runs to completion while we wait indefinitely"""
+
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_waittimeout_10s_python.ini"],
+ cwd=here)
+ p.run()
+ p.wait()
+
+ self.determine_status(p)
+
+ def test_timeout(self):
+ """ Process is started, runs but we time out waiting on it
+ to complete
+ """
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_waittimeout_python.ini"],
+ cwd=here)
+ p.run(timeout=10)
+ p.wait()
+
+ if mozinfo.isUnix:
+ # process was killed, so returncode should be negative
+ self.assertLess(p.proc.returncode, 0)
+
+ self.determine_status(p, False, ['returncode', 'didtimeout'])
+
+ def test_waittimeout(self):
+ """
+ Process is started, then wait is called and times out.
+ Process is still running and didn't timeout
+ """
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_waittimeout_10s_python.ini"],
+ cwd=here)
+
+ p.run()
+ p.wait(timeout=5)
+
+ self.determine_status(p, True, ())
+
+ def test_waitnotimeout(self):
+ """ Process is started, runs to completion before our wait times out
+ """
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_waittimeout_10s_python.ini"],
+ cwd=here)
+ p.run(timeout=30)
+ p.wait()
+
+ self.determine_status(p)
+
+ def test_wait_twice_after_kill(self):
+ """Bug 968718: Process is started and stopped. wait() twice afterward."""
+ p = processhandler.ProcessHandler([self.python, self.proclaunch,
+ "process_waittimeout_python.ini"],
+ cwd=here)
+ p.run()
+ p.kill()
+ returncode1 = p.wait()
+ returncode2 = p.wait()
+
+ self.determine_status(p)
+
+ self.assertLess(returncode2, 0,
+ 'Negative returncode expected, got "%s"' % returncode2)
+ self.assertEqual(returncode1, returncode2,
+ 'Expected both returncodes of wait() to be equal')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprocess/tests/test_process_reader.py b/testing/mozbase/mozprocess/tests/test_process_reader.py
new file mode 100644
index 000000000..0cf84d9a4
--- /dev/null
+++ b/testing/mozbase/mozprocess/tests/test_process_reader.py
@@ -0,0 +1,101 @@
+import unittest
+import subprocess
+import sys
+from mozprocess.processhandler import ProcessReader, StoreOutput
+
+
+def run_python(str_code, stdout=subprocess.PIPE, stderr=subprocess.PIPE):
+ cmd = [sys.executable, '-c', str_code]
+ return subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
+
+
+class TestProcessReader(unittest.TestCase):
+
+ def setUp(self):
+ self.out = StoreOutput()
+ self.err = StoreOutput()
+ self.finished = False
+
+ def on_finished():
+ self.finished = True
+ self.timeout = False
+
+ def on_timeout():
+ self.timeout = True
+ self.reader = ProcessReader(stdout_callback=self.out,
+ stderr_callback=self.err,
+ finished_callback=on_finished,
+ timeout_callback=on_timeout)
+
+ def test_stdout_callback(self):
+ proc = run_python('print 1; print 2')
+ self.reader.start(proc)
+ self.reader.thread.join()
+
+ self.assertEqual(self.out.output, ['1', '2'])
+ self.assertEqual(self.err.output, [])
+
+ def test_stderr_callback(self):
+ proc = run_python('import sys; sys.stderr.write("hello world\\n")')
+ self.reader.start(proc)
+ self.reader.thread.join()
+
+ self.assertEqual(self.out.output, [])
+ self.assertEqual(self.err.output, ['hello world'])
+
+ def test_stdout_and_stderr_callbacks(self):
+ proc = run_python('import sys; sys.stderr.write("hello world\\n"); print 1; print 2')
+ self.reader.start(proc)
+ self.reader.thread.join()
+
+ self.assertEqual(self.out.output, ['1', '2'])
+ self.assertEqual(self.err.output, ['hello world'])
+
+ def test_finished_callback(self):
+ self.assertFalse(self.finished)
+ proc = run_python('')
+ self.reader.start(proc)
+ self.reader.thread.join()
+ self.assertTrue(self.finished)
+
+ def test_timeout(self):
+ self.reader.timeout = 0.05
+ self.assertFalse(self.timeout)
+ proc = run_python('import time; time.sleep(0.1)')
+ self.reader.start(proc)
+ self.reader.thread.join()
+ self.assertTrue(self.timeout)
+ self.assertFalse(self.finished)
+
+ def test_output_timeout(self):
+ self.reader.output_timeout = 0.05
+ self.assertFalse(self.timeout)
+ proc = run_python('import time; time.sleep(0.1)')
+ self.reader.start(proc)
+ self.reader.thread.join()
+ self.assertTrue(self.timeout)
+ self.assertFalse(self.finished)
+
+ def test_read_without_eol(self):
+ proc = run_python('import sys; sys.stdout.write("1")')
+ self.reader.start(proc)
+ self.reader.thread.join()
+ self.assertEqual(self.out.output, ['1'])
+
+ def test_read_with_strange_eol(self):
+ proc = run_python('import sys; sys.stdout.write("1\\r\\r\\r\\n")')
+ self.reader.start(proc)
+ self.reader.thread.join()
+ self.assertEqual(self.out.output, ['1'])
+
+ def test_mixed_stdout_stderr(self):
+ proc = run_python('import sys; sys.stderr.write("hello world\\n"); print 1; print 2',
+ stderr=subprocess.STDOUT)
+ self.reader.start(proc)
+ self.reader.thread.join()
+
+ self.assertEqual(sorted(self.out.output), sorted(['1', '2', 'hello world']))
+ self.assertEqual(self.err.output, [])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/mozprofile/__init__.py b/testing/mozbase/mozprofile/mozprofile/__init__.py
new file mode 100644
index 000000000..96bf1020b
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/__init__.py
@@ -0,0 +1,21 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+To use mozprofile as an API you can import mozprofile.profile_ and/or the AddonManager_.
+
+``mozprofile.profile`` features a generic ``Profile`` class. In addition,
+subclasses ``FirefoxProfile`` and ``ThunderbirdProfile`` are available
+with preset preferences for those applications.
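+
+A minimal usage sketch (the preference name is only an example)::
+
+    from mozprofile import FirefoxProfile
+    profile = FirefoxProfile(preferences={'browser.shell.checkDefaultBrowser': False})
+    print profile.profile  # path of the generated profile directory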
+"""
+
+from addons import *
+from cli import *
+from diff import *
+from permissions import *
+from prefs import *
+from profile import *
+from view import *
+from webapps import *
diff --git a/testing/mozbase/mozprofile/mozprofile/addons.py b/testing/mozbase/mozprofile/mozprofile/addons.py
new file mode 100644
index 000000000..e96fd6b36
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/addons.py
@@ -0,0 +1,410 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import sys
+import tempfile
+import urllib2
+import zipfile
+from xml.dom import minidom
+
+import mozfile
+from mozlog.unstructured import getLogger
+
+# Needed for the AMO's rest API -
+# https://developer.mozilla.org/en/addons.mozilla.org_%28AMO%29_API_Developers%27_Guide/The_generic_AMO_API
+AMO_API_VERSION = "1.5"
+
+# Logger for 'mozprofile.addons' module
+module_logger = getLogger(__name__)
+
+
+class AddonFormatError(Exception):
+ """Exception for not well-formed add-on manifest files"""
+
+
+class AddonManager(object):
+ """
+ Handles all operations regarding addons in a profile including:
+ installing and cleaning addons
+ """
+
+ def __init__(self, profile, restore=True):
+ """
+ :param profile: the path to the profile for which we install addons
+ :param restore: whether to reset to the previous state on instance garbage collection
+ """
+ self.profile = profile
+ self.restore = restore
+
+ # Initialize all class members
+ self._internal_init()
+
+ def _internal_init(self):
+ """Internal: Initialize all class members to their default value"""
+
+ # Add-ons installed; needed for cleanup
+ self._addons = []
+
+ # Backup folder for already existing addons
+ self.backup_dir = None
+
+ # Add-ons downloaded and which have to be removed from the file system
+ self.downloaded_addons = []
+
+ # Information needed for profile reset (see http://bit.ly/17JesUf)
+ self.installed_addons = []
+ self.installed_manifests = []
+
+ def __del__(self):
+ # reset to pre-instance state
+ if self.restore:
+ self.clean()
+
+ def clean(self):
+ """Clean up addons in the profile."""
+
+ # Remove all add-ons installed
+ for addon in self._addons:
+ # TODO (bug 934642)
+ # Once we have proper handling of add-ons we should remove the id
+ # from self._addons once the add-on is removed. For now let's ignore
+ # the exception.
+ try:
+ self.remove_addon(addon)
+ except IOError:
+ pass
+
+ # Remove all downloaded add-ons
+ for addon in self.downloaded_addons:
+ mozfile.remove(addon)
+
+ # restore backups
+ if self.backup_dir and os.path.isdir(self.backup_dir):
+ extensions_path = os.path.join(self.profile, 'extensions', 'staged')
+
+ for backup in os.listdir(self.backup_dir):
+ backup_path = os.path.join(self.backup_dir, backup)
+ shutil.move(backup_path, extensions_path)
+
+ if not os.listdir(self.backup_dir):
+ mozfile.remove(self.backup_dir)
+
+ # reset instance variables to defaults
+ self._internal_init()
+
+ @classmethod
+ def download(self, url, target_folder=None):
+ """
+ Downloads an add-on from the specified URL to the target folder
+
+ :param url: URL of the add-on (XPI file)
+ :param target_folder: Folder to store the XPI file in
+
+ """
+ response = urllib2.urlopen(url)
+ fd, path = tempfile.mkstemp(suffix='.xpi')
+ os.write(fd, response.read())
+ os.close(fd)
+
+ if not self.is_addon(path):
+ mozfile.remove(path)
+ raise AddonFormatError('Not a valid add-on: %s' % url)
+
+ # Give the downloaded file a better name by using the add-on id
+ details = self.addon_details(path)
+ new_path = path.replace('.xpi', '_%s.xpi' % details.get('id'))
+
+ # Move the add-on to the target folder if requested
+ if target_folder:
+ new_path = os.path.join(target_folder, os.path.basename(new_path))
+
+ os.rename(path, new_path)
+
+ return new_path
+
+ def get_addon_path(self, addon_id):
+ """Returns the path to the installed add-on
+
+ :param addon_id: id of the add-on to retrieve the path from
+ """
+ # By default we expect add-ons to be located under the extensions
+ # folder. Only if the application hasn't been run yet, and hasn't
+ # installed the add-ons, will they be located under 'staged'.
+ # Add-ons could also have been unpacked by the application.
+ extensions_path = os.path.join(self.profile, 'extensions')
+ paths = [os.path.join(extensions_path, addon_id),
+ os.path.join(extensions_path, addon_id + '.xpi'),
+ os.path.join(extensions_path, 'staged', addon_id),
+ os.path.join(extensions_path, 'staged', addon_id + '.xpi')]
+ for path in paths:
+ if os.path.exists(path):
+ return path
+
+ raise IOError('Add-on not found: %s' % addon_id)
+
+ @classmethod
+ def is_addon(self, addon_path):
+ """
+ Checks if the given path is a valid addon
+
+ :param addon_path: path to the add-on directory or XPI
+ """
+ try:
+ self.addon_details(addon_path)
+ return True
+ except AddonFormatError:
+ return False
+
+ def install_addons(self, addons=None, manifests=None):
+ """
+ Installs all types of addons
+
+ :param addons: a list of addon paths to install
+ :param manifests: a list of addon manifests to install
+ """
+
+ # install addon paths
+ if addons:
+ if isinstance(addons, basestring):
+ addons = [addons]
+ for addon in set(addons):
+ self.install_from_path(addon)
+
+ # install addon manifests
+ if manifests:
+ if isinstance(manifests, basestring):
+ manifests = [manifests]
+ for manifest in manifests:
+ self.install_from_manifest(manifest)
+
+ def install_from_manifest(self, filepath):
+ """
+ Installs addons from a manifest
+ :param filepath: path to the manifest of addons to install
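+
+ An illustrative manifest in manifestparser .ini syntax (section names and the
+ amo_id value are made-up examples)::
+
+ [extension1.xpi]
+
+ [extension2]
+ amo_id = 1234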
+ """
+ try:
+ from manifestparser import ManifestParser
+ except ImportError:
+ module_logger.critical(
+ "Installing addons from manifest requires the"
+ " manifestparser package to be installed.")
+ raise
+
+ manifest = ManifestParser()
+ manifest.read(filepath)
+ addons = manifest.get()
+
+ for addon in addons:
+ if '://' in addon['path'] or os.path.exists(addon['path']):
+ self.install_from_path(addon['path'])
+ continue
+
+ # No path specified, try to grab it off AMO
+ locale = addon.get('amo_locale', 'en_US')
+ query = 'https://services.addons.mozilla.org/' + locale + '/firefox/api/' \
+ + AMO_API_VERSION + '/'
+ if 'amo_id' in addon:
+ # this query grabs information on the addon based on its id
+ query += 'addon/' + addon['amo_id']
+ else:
+ # this query grabs information on the first addon returned from a search
+ query += 'search/' + addon['name'] + '/default/1'
+ install_path = AddonManager.get_amo_install_path(query)
+ self.install_from_path(install_path)
+
+ self.installed_manifests.append(filepath)
+
+ @classmethod
+ def get_amo_install_path(self, query):
+ """
+ Get the addon xpi install path for the specified AMO query.
+
+ :param query: query-documentation_
+
+ .. _query-documentation: https://developer.mozilla.org/en/addons.mozilla.org_%28AMO%29_API_Developers%27_Guide/The_generic_AMO_API # noqa
+ """
+ response = urllib2.urlopen(query)
+ dom = minidom.parseString(response.read())
+ for node in dom.getElementsByTagName('install')[0].childNodes:
+ if node.nodeType == node.TEXT_NODE:
+ return node.data
+
+ @classmethod
+ def addon_details(cls, addon_path):
+ """
+ Returns a dictionary of details about the addon.
+
+ :param addon_path: path to the add-on directory or XPI
+
+ Returns::
+
+ {'id': u'rainbow@colors.org', # id of the addon
+ 'version': u'1.4', # version of the addon
+ 'name': u'Rainbow', # name of the addon
+ 'unpack': False } # whether to unpack the addon
+ """
+
+ details = {
+ 'id': None,
+ 'unpack': False,
+ 'name': None,
+ 'version': None
+ }
+
+ def get_namespace_id(doc, url):
+ attributes = doc.documentElement.attributes
+ namespace = ""
+ for i in range(attributes.length):
+ if attributes.item(i).value == url:
+ if ":" in attributes.item(i).name:
+ # If the namespace is not the default one remove 'xmlns:'
+ namespace = attributes.item(i).name.split(':')[1] + ":"
+ break
+ return namespace
+
+ def get_text(element):
+ """Retrieve the text value of a given node"""
+ rc = []
+ for node in element.childNodes:
+ if node.nodeType == node.TEXT_NODE:
+ rc.append(node.data)
+ return ''.join(rc).strip()
+
+ if not os.path.exists(addon_path):
+ raise IOError('Add-on path does not exist: %s' % addon_path)
+
+ try:
+ if zipfile.is_zipfile(addon_path):
+ # Bug 944361 - We cannot use 'with' together with ZipFile because
+ # it raises an exception on Python 2.6.
+ try:
+ compressed_file = zipfile.ZipFile(addon_path, 'r')
+ manifest = compressed_file.read('install.rdf')
+ finally:
+ compressed_file.close()
+ elif os.path.isdir(addon_path):
+ with open(os.path.join(addon_path, 'install.rdf'), 'r') as f:
+ manifest = f.read()
+ else:
+ raise IOError('Add-on path is neither an XPI nor a directory: %s' % addon_path)
+ except (IOError, KeyError) as e:
+ raise AddonFormatError(str(e)), None, sys.exc_info()[2]
+
+ try:
+ doc = minidom.parseString(manifest)
+
+ # Get the namespaces abbreviations
+ em = get_namespace_id(doc, 'http://www.mozilla.org/2004/em-rdf#')
+ rdf = get_namespace_id(doc, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
+
+ description = doc.getElementsByTagName(rdf + 'Description').item(0)
+ for entry, value in description.attributes.items():
+ # Remove the namespace prefix from the tag for comparison
+ entry = entry.replace(em, "")
+ if entry in details.keys():
+ details.update({entry: value})
+ for node in description.childNodes:
+ # Remove the namespace prefix from the tag for comparison
+ entry = node.nodeName.replace(em, "")
+ if entry in details.keys():
+ details.update({entry: get_text(node)})
+ except Exception as e:
+ raise AddonFormatError(str(e)), None, sys.exc_info()[2]
+
+ # turn unpack into a true/false value
+ if isinstance(details['unpack'], basestring):
+ details['unpack'] = details['unpack'].lower() == 'true'
+
+ # If no ID is set, the add-on is invalid
+ if details.get('id') is None:
+ raise AddonFormatError('Add-on id could not be found.')
+
+ return details
+
+ def install_from_path(self, path, unpack=False):
+ """
+ Installs an add-on into the profile from a file path, URL, or directory of add-ons.
+
+ :param path: url, path to .xpi, or directory of addons
+ :param unpack: whether to unpack unless specified otherwise in the install.rdf
+ """
+
+ # if the addon is a URL, download it
+ # note that this won't work with protocols urllib2 doesn't support
+ if mozfile.is_url(path):
+ path = self.download(path)
+ self.downloaded_addons.append(path)
+
+ addons = [path]
+
+ # if path is not an add-on, try to install all contained add-ons
+ try:
+ self.addon_details(path)
+ except AddonFormatError as e:
+ module_logger.warning('Could not install %s: %s' % (path, str(e)))
+
+ # If the path doesn't exist, then we don't really care, just return
+ if not os.path.isdir(path):
+ return
+
+ addons = [os.path.join(path, x) for x in os.listdir(path) if
+ self.is_addon(os.path.join(path, x))]
+ addons.sort()
+
+ # install each addon
+ for addon in addons:
+ # determine the addon id
+ addon_details = self.addon_details(addon)
+ addon_id = addon_details.get('id')
+
+ # if the add-on has to be unpacked force it now
+ # note: we might want to let Firefox handle the unpacking itself
+ orig_path = None
+ if os.path.isfile(addon) and (unpack or addon_details['unpack']):
+ orig_path = addon
+ addon = tempfile.mkdtemp()
+ mozfile.extract(orig_path, addon)
+
+ # copy the addon to the profile
+ extensions_path = os.path.join(self.profile, 'extensions', 'staged')
+ addon_path = os.path.join(extensions_path, addon_id)
+
+ if os.path.isfile(addon):
+ addon_path += '.xpi'
+
+ # move existing xpi file to backup location to restore later
+ if os.path.exists(addon_path):
+ self.backup_dir = self.backup_dir or tempfile.mkdtemp()
+ shutil.move(addon_path, self.backup_dir)
+
+ # copy new add-on to the extension folder
+ if not os.path.exists(extensions_path):
+ os.makedirs(extensions_path)
+ shutil.copy(addon, addon_path)
+ else:
+ # move existing folder to backup location to restore later
+ if os.path.exists(addon_path):
+ self.backup_dir = self.backup_dir or tempfile.mkdtemp()
+ shutil.move(addon_path, self.backup_dir)
+
+ # copy new add-on to the extension folder
+ shutil.copytree(addon, addon_path, symlinks=True)
+
+ # if we had to extract the addon, remove the temporary directory
+ if orig_path:
+ mozfile.remove(addon)
+ addon = orig_path
+
+ self._addons.append(addon_id)
+ self.installed_addons.append(addon)
+
+ def remove_addon(self, addon_id):
+ """Remove the add-on as specified by the id
+
+ :param addon_id: id of the add-on to be removed
+ """
+ path = self.get_addon_path(addon_id)
+ mozfile.remove(path)
diff --git a/testing/mozbase/mozprofile/mozprofile/cli.py b/testing/mozbase/mozprofile/mozprofile/cli.py
new file mode 100755
index 000000000..1dd513e56
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/cli.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Creates and/or modifies a Firefox profile.
+The profile can be modified by passing in addons to install or preferences to set.
+If no profile is specified, a new profile is created and the path of the
+resulting profile is printed.
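+
+Example invocation, assuming the ``mozprofile`` console script is installed (the
+preference shown is illustrative)::
+
+ mozprofile --pref browser.startup.homepage:about:blank --view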
+"""
+
+import sys
+from optparse import OptionParser
+from prefs import Preferences
+from profile import FirefoxProfile
+from profile import Profile
+
+__all__ = ['MozProfileCLI', 'cli']
+
+
+class MozProfileCLI(object):
+ """The Command Line Interface for ``mozprofile``."""
+
+ module = 'mozprofile'
+ profile_class = Profile
+
+ def __init__(self, args=sys.argv[1:], add_options=None):
+ self.parser = OptionParser(description=__doc__)
+ self.add_options(self.parser)
+ if add_options:
+ add_options(self.parser)
+ (self.options, self.args) = self.parser.parse_args(args)
+
+ def add_options(self, parser):
+
+ parser.add_option("-p", "--profile", dest="profile",
+ help="The path to the profile to operate on. "
+ "If none, creates a new profile in temp directory")
+ parser.add_option("-a", "--addon", dest="addons",
+ action="append", default=[],
+ help="Addon paths to install. Can be a filepath, "
+ "a directory containing addons, or a url")
+ parser.add_option("--addon-manifests", dest="addon_manifests",
+ action="append",
+ help="An addon manifest to install")
+ parser.add_option("--pref", dest="prefs",
+ action='append', default=[],
+ help="A preference to set. "
+ "Must be a key-value pair separated by a ':'")
+ parser.add_option("--preferences", dest="prefs_files",
+ action='append', default=[],
+ metavar="FILE",
+ help="read preferences from a JSON or INI file. "
+ "For INI, use 'file.ini:section' to specify a particular section.")
+
+ def profile_args(self):
+ """arguments to instantiate the profile class"""
+ return dict(profile=self.options.profile,
+ addons=self.options.addons,
+ addon_manifests=self.options.addon_manifests,
+ preferences=self.preferences())
+
+ def preferences(self):
+ """profile preferences"""
+
+ # object to hold preferences
+ prefs = Preferences()
+
+ # add preferences files
+ for prefs_file in self.options.prefs_files:
+ prefs.add_file(prefs_file)
+
+ # change CLI preferences into 2-tuples
+ separator = ':'
+ cli_prefs = []
+ for pref in self.options.prefs:
+ if separator not in pref:
+ self.parser.error("Preference must be a key-value pair separated by "
+ "a ':' (You gave: %s)" % pref)
+ cli_prefs.append(pref.split(separator, 1))
+
+ # string preferences
+ prefs.add(cli_prefs, cast=True)
+
+ return prefs()
+
+ def profile(self, restore=False):
+ """create the profile"""
+
+ kwargs = self.profile_args()
+ kwargs['restore'] = restore
+ return self.profile_class(**kwargs)
+
+
+def cli(args=sys.argv[1:]):
+ """ Handles the command line arguments for ``mozprofile`` via ``sys.argv``"""
+
+ # add a view method for this cli method only
+ def add_options(parser):
+ parser.add_option('--view', dest='view',
+ action='store_true', default=False,
+ help="view summary of profile following invocation")
+ parser.add_option('--firefox', dest='firefox_profile',
+ action='store_true', default=False,
+ help="use FirefoxProfile defaults")
+
+ # process the command line
+ cli = MozProfileCLI(args, add_options)
+
+ if cli.args:
+ cli.parser.error("Program doesn't support positional arguments.")
+
+ if cli.options.firefox_profile:
+ cli.profile_class = FirefoxProfile
+
+ # create the profile
+ profile = cli.profile()
+
+ if cli.options.view:
+ # view the profile, if specified
+ print profile.summary()
+ return
+
+ # if no profile was passed in print the newly created profile
+ if not cli.options.profile:
+ print profile.profile
+
+if __name__ == '__main__':
+ cli()
diff --git a/testing/mozbase/mozprofile/mozprofile/diff.py b/testing/mozbase/mozprofile/mozprofile/diff.py
new file mode 100644
index 000000000..98776e838
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/diff.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+"""
+diff two profile summaries
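+
+Usage sketch (the profile paths are illustrative)::
+
+ python mozprofile/diff.py /path/to/profile1 /path/to/profile2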
+"""
+
+import difflib
+import profile
+import optparse
+import os
+import sys
+
+__all__ = ['diff', 'diff_profiles']
+
+
+def diff(profile1, profile2, diff_function=difflib.unified_diff):
+
+ profiles = (profile1, profile2)
+ parts = {}
+ parts_dict = {}
+ for index in (0, 1):
+ prof = profiles[index]
+
+ # first part, the path, isn't useful for diffing
+ parts[index] = prof.summary(return_parts=True)[1:]
+
+ parts_dict[index] = dict(parts[index])
+
+ # keys the first profile is missing
+ first_missing = [i for i in parts_dict[1]
+ if i not in parts_dict[0]]
+ parts[0].extend([(i, '') for i in first_missing])
+
+ # diffs
+ retval = []
+ for key, value in parts[0]:
+ other = parts_dict[1].get(key, '')
+ value = value.strip()
+ other = other.strip()
+
+ if key == 'Files':
+ # first line of files is the path; we don't care to diff that
+ value = '\n'.join(value.splitlines()[1:])
+ if other:
+ other = '\n'.join(other.splitlines()[1:])
+
+ value = value.splitlines()
+ other = other.splitlines()
+ section_diff = list(diff_function(value, other, profile1.profile, profile2.profile))
+ if section_diff:
+ retval.append((key, '\n'.join(section_diff)))
+
+ return retval
+
+
+def diff_profiles(args=sys.argv[1:]):
+
+ # parse command line
+ usage = '%prog [options] profile1 profile2'
+ parser = optparse.OptionParser(usage=usage, description=__doc__)
+ options, args = parser.parse_args(args)
+ if len(args) != 2:
+ parser.error("Must give two profile paths")
+ missing = [arg for arg in args if not os.path.exists(arg)]
+ if missing:
+ parser.error("Profile not found: %s" % (', '.join(missing)))
+
+ # get the profile differences
+ diffs = diff(*([profile.Profile(arg)
+ for arg in args]))
+
+ # display them
+ while diffs:
+ key, value = diffs.pop(0)
+ print '[%s]:\n' % key
+ print value
+ if diffs:
+ print '-' * 4
+
+if __name__ == '__main__':
+ diff_profiles()
diff --git a/testing/mozbase/mozprofile/mozprofile/permissions.py b/testing/mozbase/mozprofile/mozprofile/permissions.py
new file mode 100644
index 000000000..ea13d96f0
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/permissions.py
@@ -0,0 +1,415 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""
+add permissions to the profile
+"""
+
+import codecs
+import os
+import sqlite3
+import urlparse
+
+__all__ = ['MissingPrimaryLocationError', 'MultiplePrimaryLocationsError',
+ 'DEFAULT_PORTS', 'DuplicateLocationError', 'BadPortLocationError',
+ 'LocationsSyntaxError', 'Location', 'ServerLocations',
+ 'Permissions']
+
+# http://hg.mozilla.org/mozilla-central/file/b871dfb2186f/build/automation.py.in#l28
+DEFAULT_PORTS = {'http': '8888',
+ 'https': '4443',
+ 'ws': '4443',
+ 'wss': '4443'}
+
+
+class LocationError(Exception):
+ """Signifies an improperly formed location."""
+
+ def __str__(self):
+ s = "Bad location"
+ if self.message:
+ s += ": %s" % self.message
+ return s
+
+
+class MissingPrimaryLocationError(LocationError):
+ """No primary location defined in locations file."""
+
+ def __init__(self):
+ LocationError.__init__(self, "missing primary location")
+
+
+class MultiplePrimaryLocationsError(LocationError):
+ """More than one primary location defined."""
+
+ def __init__(self):
+ LocationError.__init__(self, "multiple primary locations")
+
+
+class DuplicateLocationError(LocationError):
+ """Same location defined twice."""
+
+ def __init__(self, url):
+ LocationError.__init__(self, "duplicate location: %s" % url)
+
+
+class BadPortLocationError(LocationError):
+ """Location has invalid port value."""
+
+ def __init__(self, given_port):
+ LocationError.__init__(self, "bad value for port: %s" % given_port)
+
+
+class LocationsSyntaxError(Exception):
+ """Signifies a syntax error on a particular line in server-locations.txt."""
+
+ def __init__(self, lineno, err=None):
+ self.err = err
+ self.lineno = lineno
+
+ def __str__(self):
+ s = "Syntax error on line %s" % self.lineno
+ if self.err:
+ s += ": %s." % self.err
+ else:
+ s += "."
+ return s
+
+
+class Location(object):
+ """Represents a location line in server-locations.txt."""
+
+ attrs = ('scheme', 'host', 'port')
+
+ def __init__(self, scheme, host, port, options):
+ for attr in self.attrs:
+ setattr(self, attr, locals()[attr])
+ self.options = options
+ try:
+ int(self.port)
+ except ValueError:
+ raise BadPortLocationError(self.port)
+
+ def isEqual(self, location):
+ """compare scheme://host:port, but ignore options"""
+ return len([i for i in self.attrs
+ if getattr(self, i) == getattr(location, i)]) == len(self.attrs)
+
+ __eq__ = isEqual
+
+ def url(self):
+ return '%s://%s:%s' % (self.scheme, self.host, self.port)
+
+ def __str__(self):
+ return '%s %s' % (self.url(), ','.join(self.options))
+
+
+class ServerLocations(object):
+ """Iterable collection of locations.
+ Use provided functions to add new locations, rather than manipulating
+ _locations directly, in order to check for errors and to ensure the
+ callback is called, if given.
+ """
+
+ def __init__(self, filename=None, add_callback=None):
+ self.add_callback = add_callback
+ self._locations = []
+ self.hasPrimary = False
+ if filename:
+ self.read(filename)
+
+ def __iter__(self):
+ return self._locations.__iter__()
+
+ def __len__(self):
+ return len(self._locations)
+
+ def add(self, location, suppress_callback=False):
+ if "primary" in location.options:
+ if self.hasPrimary:
+ raise MultiplePrimaryLocationsError()
+ self.hasPrimary = True
+
+ self._locations.append(location)
+ if self.add_callback and not suppress_callback:
+ self.add_callback([location])
+
+ def add_host(self, host, port='80', scheme='http', options='privileged'):
+ if isinstance(options, basestring):
+ options = options.split(',')
+ self.add(Location(scheme, host, port, options))
+
+ def read(self, filename, check_for_primary=True):
+ """
+ Reads the file and adds all valid locations to the ``self._locations`` array.
+
+ :param filename: in the format of server-locations.txt_
+ :param check_for_primary: if True, a ``MissingPrimaryLocationError`` exception is raised
+ if no primary is found
+
+ .. _server-locations.txt: http://dxr.mozilla.org/mozilla-central/source/build/pgo/server-locations.txt # noqa
+
+ The only exception is that the port, if not defined, defaults to 80 or 443.
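+
+ Illustrative lines in the accepted format (hosts, ports and options are examples)::
+
+ http://mochi.test:8888 primary,privileged
+ https://example.com:443 privileged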
+
+ FIXME: Shouldn't this default to the protocol-appropriate port? Is
+ there any reason to have defaults at all?
+ """
+
+ locationFile = codecs.open(filename, "r", "UTF-8")
+ lineno = 0
+ new_locations = []
+
+ for line in locationFile:
+ line = line.strip()
+ lineno += 1
+
+ # check for comments and blank lines
+ if line.startswith("#") or not line:
+ continue
+
+ # split the server from the options
+ try:
+ server, options = line.rsplit(None, 1)
+ options = options.split(',')
+ except ValueError:
+ server = line
+ options = []
+
+ # parse the server url
+ if '://' not in server:
+ server = 'http://' + server
+ scheme, netloc, path, query, fragment = urlparse.urlsplit(server)
+ # get the host and port
+ try:
+ host, port = netloc.rsplit(':', 1)
+ except ValueError:
+ host = netloc
+ port = DEFAULT_PORTS.get(scheme, '80')
+
+ try:
+ location = Location(scheme, host, port, options)
+ self.add(location, suppress_callback=True)
+ except LocationError as e:
+ raise LocationsSyntaxError(lineno, e)
+
+ new_locations.append(location)
+
+ # ensure that a primary is found
+ if check_for_primary and not self.hasPrimary:
+ raise LocationsSyntaxError(lineno + 1,
+ MissingPrimaryLocationError())
+
+ if self.add_callback:
+ self.add_callback(new_locations)
+
+
+class Permissions(object):
+ """Allows handling of permissions for ``mozprofile``"""
+
+ def __init__(self, profileDir, locations=None):
+ self._profileDir = profileDir
+ self._locations = ServerLocations(add_callback=self.write_db)
+ if locations:
+ if isinstance(locations, ServerLocations):
+ self._locations = locations
+ self._locations.add_callback = self.write_db
+ self.write_db(self._locations._locations)
+ elif isinstance(locations, list):
+ for l in locations:
+ self._locations.add_host(**l)
+ elif isinstance(locations, dict):
+ self._locations.add_host(**locations)
+ elif os.path.exists(locations):
+ self._locations.read(locations)
+
+ def write_db(self, locations):
+ """write permissions to the sqlite database"""
+
+ # Open database and create table
+ permDB = sqlite3.connect(os.path.join(self._profileDir, "permissions.sqlite"))
+ cursor = permDB.cursor()
+
+ # SQL copied from
+ # http://dxr.mozilla.org/mozilla-central/source/extensions/cookie/nsPermissionManager.cpp
+ cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
+ id INTEGER PRIMARY KEY
+ ,origin TEXT
+ ,type TEXT
+ ,permission INTEGER
+ ,expireType INTEGER
+ ,expireTime INTEGER
+ ,modificationTime INTEGER
+ )""")
+
+ rows = cursor.execute("PRAGMA table_info(moz_hosts)")
+ count = len(rows.fetchall())
+
+ using_origin = False
+ # if the db contains 7 columns, we're using user_version 5
+ if count == 7:
+ statement = "INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0, 0)"
+ cursor.execute("PRAGMA user_version=5;")
+ using_origin = True
+ # if the db contains 9 columns, we're using user_version 4
+ elif count == 9:
+ statement = "INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0, 0, 0, 0)"
+ cursor.execute("PRAGMA user_version=4;")
+ # if the db contains 8 columns, we're using user_version 3
+ elif count == 8:
+ statement = "INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0, 0, 0)"
+ cursor.execute("PRAGMA user_version=3;")
+ else:
+ statement = "INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0)"
+ cursor.execute("PRAGMA user_version=2;")
+
+ for location in locations:
+ # set the permissions
+ permissions = {'allowXULXBL': 'noxul' not in location.options}
+ for perm, allow in permissions.iteritems():
+ if allow:
+ permission_type = 1
+ else:
+ permission_type = 2
+
+ if using_origin:
+ # This is a crude approximation of the origin generation logic from
+ # nsPrincipal and nsStandardURL. It should suffice for the permissions
+ # which the test runners will want to insert into the system.
+ origin = location.scheme + "://" + location.host
+ if (location.scheme != 'http' or location.port != '80') and \
+ (location.scheme != 'https' or location.port != '443'):
+ origin += ':' + str(location.port)
+
+ cursor.execute(statement,
+ (origin, perm, permission_type))
+ else:
+ # The database is still using a legacy system based on hosts
+ # We can insert the permission as a host
+ #
+ # XXX This codepath should not be hit, as tests are run with
+ # fresh profiles. However, if it was hit, permissions would
+ # not be added to the database correctly (bug 1183185).
+ cursor.execute(statement,
+ (location.host, perm, permission_type))
+
+ # Commit and close
+ permDB.commit()
+ cursor.close()
+
+ def network_prefs(self, proxy=None):
+ """
+ take known locations and generate preferences to handle permissions and proxy;
+ returns a tuple of (prefs, user_prefs)
+ """
+
+ prefs = []
+
+ if proxy:
+ user_prefs = self.pac_prefs(proxy)
+ else:
+ user_prefs = []
+
+ return prefs, user_prefs
+
+ def pac_prefs(self, user_proxy=None):
+ """
+ return preferences for Proxy Auto-Config (PAC); originally taken from
+ http://dxr.mozilla.org/mozilla-central/source/build/automation.py.in
+ """
+ proxy = DEFAULT_PORTS.copy()
+
+ # We need to proxy every server but the primary one.
+ origins = ["'%s'" % l.url()
+ for l in self._locations]
+ origins = ", ".join(origins)
+ proxy["origins"] = origins
+
+ for l in self._locations:
+ if "primary" in l.options:
+ proxy["remote"] = l.host
+ proxy[l.scheme] = l.port
+
+ # overwrite defaults with user specified proxy
+ if isinstance(user_proxy, dict):
+ proxy.update(user_proxy)
+
+ # TODO: this should live in a template!
+ # If you must escape things in this string with backslashes, be aware
+ # of the multiple layers of escaping at work:
+ #
+ # - Python will unescape backslashes;
+ # - Writing out the prefs will escape things via JSON serialization;
+ # - The prefs file reader will unescape backslashes;
+ # - The JS engine parser will unescape backslashes.
+ pacURL = """data:text/plain,
+var knownOrigins = (function () {
+ return [%(origins)s].reduce(function(t, h) { t[h] = true; return t; }, {})
+})();
+var uriRegex = new RegExp('^([a-z][-a-z0-9+.]*)' +
+ '://' +
+ '(?:[^/@]*@)?' +
+ '(.*?)' +
+ '(?::(\\\\d+))?/');
+var defaultPortsForScheme = {
+ 'http': 80,
+ 'ws': 80,
+ 'https': 443,
+ 'wss': 443
+};
+var originSchemesRemap = {
+ 'ws': 'http',
+ 'wss': 'https'
+};
+var proxyForScheme = {
+ 'http': 'PROXY %(remote)s:%(http)s',
+ 'https': 'PROXY %(remote)s:%(https)s',
+ 'ws': 'PROXY %(remote)s:%(ws)s',
+ 'wss': 'PROXY %(remote)s:%(wss)s'
+};
+
+function FindProxyForURL(url, host)
+{
+ var matches = uriRegex.exec(url);
+ if (!matches)
+ return 'DIRECT';
+ var originalScheme = matches[1];
+ var host = matches[2];
+ var port = matches[3];
+ if (!port && originalScheme in defaultPortsForScheme) {
+ port = defaultPortsForScheme[originalScheme];
+ }
+ var schemeForOriginChecking = originSchemesRemap[originalScheme] || originalScheme;
+
+ var origin = schemeForOriginChecking + '://' + host + ':' + port;
+ if (!(origin in knownOrigins))
+ return 'DIRECT';
+ return proxyForScheme[originalScheme] || 'DIRECT';
+}""" % proxy
+ pacURL = "".join(pacURL.splitlines())
+
+ prefs = []
+ prefs.append(("network.proxy.type", 2))
+ prefs.append(("network.proxy.autoconfig_url", pacURL))
+
+ return prefs
+
+ def clean_db(self):
+ """Removed permissions added by mozprofile."""
+
+ sqlite_file = os.path.join(self._profileDir, "permissions.sqlite")
+ if not os.path.exists(sqlite_file):
+ return
+
+ # Open the database
+ permDB = sqlite3.connect(sqlite_file)
+ cursor = permDB.cursor()
+
+ # TODO: only delete values that we add, this would require sending
+ # in the full permissions object
+ cursor.execute("DROP TABLE IF EXISTS moz_hosts")
+
+ # Commit and close
+ permDB.commit()
+ cursor.close()
diff --git a/testing/mozbase/mozprofile/mozprofile/prefs.py b/testing/mozbase/mozprofile/mozprofile/prefs.py
new file mode 100644
index 000000000..b0eb01e28
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/prefs.py
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+user preferences
+"""
+
+import json
+import mozfile
+import os
+import tokenize
+from ConfigParser import SafeConfigParser as ConfigParser
+from StringIO import StringIO
+
+__all__ = ('PreferencesReadError', 'Preferences')
+
+
+class PreferencesReadError(Exception):
+ """read error for prefrences files"""
+
+
+class Preferences(object):
+ """assembly of preferences from various sources"""
+
+ def __init__(self, prefs=None):
+ self._prefs = []
+ if prefs:
+ self.add(prefs)
+
+ def add(self, prefs, cast=False):
+ """
+ :param prefs: a dict or a list of 2-tuples of preferences to add
+ :param cast: whether to cast strings to value, e.g. '1' -> 1
+ """
+ # wants a list of 2-tuples
+ if isinstance(prefs, dict):
+ prefs = prefs.items()
+ if cast:
+ prefs = [(i, self.cast(j)) for i, j in prefs]
+ self._prefs += prefs
+
+ def add_file(self, path):
+ """a preferences from a file
+
+ :param path:
+ """
+ self.add(self.read(path))
+
+ def __call__(self):
+ return self._prefs
+
+ @classmethod
+ def cast(cls, value):
+ """
+ cast a preference value given as a string
+
+ for values coming from the command line or from e.g. an .ini file, there is no
+ good way to denote what type the preference value is, as natively it is a string:
+
+ - integers will get cast to integers
+ - true/false will get cast to True/False
+ - anything enclosed in single quotes will be treated as a string
+ with the ''s removed from both sides
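+
+ For example (illustrative)::
+
+ Preferences.cast('1') # -> 1
+ Preferences.cast('true') # -> True
+ Preferences.cast("'1'") # -> '1'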
+ """
+
+ if not isinstance(value, basestring):
+ return value # no op
+ quote = "'"
+ if value == 'true':
+ return True
+ if value == 'false':
+ return False
+ try:
+ return int(value)
+ except ValueError:
+ pass
+ if value.startswith(quote) and value.endswith(quote):
+ value = value[1:-1]
+ return value
+
+ @classmethod
+ def read(cls, path):
+ """read preferences from a file"""
+
+ section = None # for .ini files
+ basename = os.path.basename(path)
+ if ':' in basename:
+ # section of INI file
+ path, section = path.rsplit(':', 1)
+
+ if not os.path.exists(path) and not mozfile.is_url(path):
+ raise PreferencesReadError("'%s' does not exist" % path)
+
+ if section:
+ try:
+ return cls.read_ini(path, section)
+ except PreferencesReadError:
+ raise
+ except Exception as e:
+ raise PreferencesReadError(str(e))
+
+ # try both JSON and .ini format
+ try:
+ return cls.read_json(path)
+ except Exception as e:
+ try:
+ return cls.read_ini(path)
+ except Exception as f:
+ for exception in e, f:
+ if isinstance(exception, PreferencesReadError):
+ raise exception
+ raise PreferencesReadError("Could not recognize format of %s" % path)
+
+ @classmethod
+ def read_ini(cls, path, section=None):
+ """read preferences from an .ini file"""
+
+ parser = ConfigParser()
+ parser.optionxform = str
+ parser.readfp(mozfile.load(path))
+
+ if section:
+ if section not in parser.sections():
+ raise PreferencesReadError("No section '%s' in %s" % (section, path))
+ retval = parser.items(section, raw=True)
+ else:
+ retval = parser.defaults().items()
+
+ # cast the preferences since .ini is just strings
+ return [(i, cls.cast(j)) for i, j in retval]
+
+ @classmethod
+ def read_json(cls, path):
+ """read preferences from a JSON blob"""
+
+ prefs = json.loads(mozfile.load(path).read())
+
+ if type(prefs) not in [list, dict]:
+ raise PreferencesReadError("Malformed preferences: %s" % path)
+ if isinstance(prefs, list):
+ if [i for i in prefs if type(i) != list or len(i) != 2]:
+ raise PreferencesReadError("Malformed preferences: %s" % path)
+ values = [i[1] for i in prefs]
+ elif isinstance(prefs, dict):
+ values = prefs.values()
+ else:
+ raise PreferencesReadError("Malformed preferences: %s" % path)
+ types = (bool, basestring, int)
+ if [i for i in values if not [isinstance(i, j) for j in types]]:
+ raise PreferencesReadError("Only bool, string, and int values allowed")
+ return prefs
+
+ @classmethod
+ def read_prefs(cls, path, pref_setter='user_pref', interpolation=None):
+ """
+ Read preferences from (e.g.) prefs.js
+
+ :param path: The path to the preference file to read.
+ :param pref_setter: The name of the function used to set preferences
+ in the preference file.
+ :param interpolation: If provided, a dict that will be passed
+ to str.format to interpolate preference values.
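+
+ A minimal sketch (the file contents and pref name are illustrative): given a
+ prefs.js line ``user_pref("remote.server", "http://{server}/");``::
+
+ Preferences.read_prefs('prefs.js', interpolation={'server': 'localhost:8888'})
+ # -> [('remote.server', 'http://localhost:8888/')]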
+ """
+
+ marker = '##//' # magical marker
+ lines = [i.strip() for i in mozfile.load(path).readlines()]
+ _lines = []
+ for line in lines:
+ if not line.startswith(pref_setter):
+ continue
+ if '//' in line:
+ line = line.replace('//', marker)
+ _lines.append(line)
+ string = '\n'.join(_lines)
+
+ # skip trailing comments
+ processed_tokens = []
+ f_obj = StringIO(string)
+ for token in tokenize.generate_tokens(f_obj.readline):
+ if token[0] == tokenize.COMMENT:
+ continue
+ processed_tokens.append(token[:2]) # [:2] gets around http://bugs.python.org/issue9974
+ string = tokenize.untokenize(processed_tokens)
+
+ retval = []
+
+ def pref(a, b):
+ if interpolation and isinstance(b, basestring):
+ b = b.format(**interpolation)
+ retval.append((a, b))
+ lines = [i.strip().rstrip(';') for i in string.split('\n') if i.strip()]
+
+ _globals = {'retval': retval, 'true': True, 'false': False}
+ _globals[pref_setter] = pref
+ for line in lines:
+ try:
+ eval(line, _globals, {})
+ except SyntaxError:
+ print line
+ raise
+
+ # de-magic the marker
+ for index, (key, value) in enumerate(retval):
+ if isinstance(value, basestring) and marker in value:
+ retval[index] = (key, value.replace(marker, '//'))
+
+ return retval
+
+ @classmethod
+ def write(cls, _file, prefs, pref_string='user_pref(%s, %s);'):
+ """write preferences to a file"""
+
+ if isinstance(_file, basestring):
+ f = file(_file, 'a')
+ else:
+ f = _file
+
+ if isinstance(prefs, dict):
+ # order doesn't matter
+ prefs = prefs.items()
+
+ # serialize -> JSON
+ _prefs = [(json.dumps(k), json.dumps(v))
+ for k, v in prefs]
+
+ # write the preferences
+ for _pref in _prefs:
+ print >> f, pref_string % _pref
+
+ # close the file if opened internally
+ if isinstance(_file, basestring):
+ f.close()
diff --git a/testing/mozbase/mozprofile/mozprofile/profile.py b/testing/mozbase/mozprofile/mozprofile/profile.py
new file mode 100644
index 000000000..b07b11449
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/profile.py
@@ -0,0 +1,454 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import time
+import tempfile
+import uuid
+
+from addons import AddonManager
+import mozfile
+from permissions import Permissions
+from prefs import Preferences
+from shutil import copytree
+from webapps import WebappCollection
+
+__all__ = ['Profile',
+ 'FirefoxProfile',
+ 'MetroFirefoxProfile',
+ 'ThunderbirdProfile']
+
+
+class Profile(object):
+ """Handles all operations regarding profile.
+
+ Creating new profiles, installing add-ons, setting preferences and
+ handling cleanup.
+
+ The files associated with the profile will be removed automatically after
+ the object is garbage collected: ::
+
+ profile = Profile()
+ print profile.profile # this is the path to the created profile
+ del profile
+ # the profile path has been removed from disk
+
+ :meth:`cleanup` is called under the hood to remove the profile files. You
+ can ensure this method is called (even in the case of exception) by using
+ the profile as a context manager: ::
+
+ with Profile() as profile:
+ # do things with the profile
+ pass
+ # profile.cleanup() has been called here
+ """
+
+ def __init__(self, profile=None, addons=None, addon_manifests=None, apps=None,
+ preferences=None, locations=None, proxy=None, restore=True):
+ """
+ :param profile: Path to the profile
+ :param addons: String of one or list of addons to install
+ :param addon_manifests: Manifest for addons (see http://bit.ly/17jQ7i6)
+ :param apps: Dictionary or class of webapps to install
+ :param preferences: Dictionary or class of preferences
+ :param locations: ServerLocations object
+ :param proxy: Set up a proxy
+ :param restore: Flag for removing all custom settings during cleanup
+ """
+ self._addons = addons
+ self._addon_manifests = addon_manifests
+ self._apps = apps
+ self._locations = locations
+ self._proxy = proxy
+
+ # Prepare additional preferences
+ if preferences:
+ if isinstance(preferences, dict):
+ # unordered
+ preferences = preferences.items()
+
+ # sanity check
+ assert not [i for i in preferences if len(i) != 2]
+ else:
+ preferences = []
+ self._preferences = preferences
+
+ # Handle profile creation
+ self.create_new = not profile
+ if profile:
+ # Ensure we have a full path to the profile
+ self.profile = os.path.abspath(os.path.expanduser(profile))
+ else:
+ self.profile = tempfile.mkdtemp(suffix='.mozrunner')
+
+ self.restore = restore
+
+ # Initialize all class members
+ self._internal_init()
+
+ def _internal_init(self):
+ """Internal: Initialize all class members to their default value"""
+
+ if not os.path.exists(self.profile):
+ os.makedirs(self.profile)
+
+ # Preferences files written to
+ self.written_prefs = set()
+
+ # Our magic markers
+ nonce = '%s %s' % (str(time.time()), uuid.uuid4())
+ self.delimeters = ('#MozRunner Prefs Start %s' % nonce,
+ '#MozRunner Prefs End %s' % nonce)
+
+ # If sub-classes want to set default preferences
+ if hasattr(self.__class__, 'preferences'):
+ self.set_preferences(self.__class__.preferences)
+ # Set additional preferences
+ self.set_preferences(self._preferences)
+
+ self.permissions = Permissions(self.profile, self._locations)
+ prefs_js, user_js = self.permissions.network_prefs(self._proxy)
+ self.set_preferences(prefs_js, 'prefs.js')
+ self.set_preferences(user_js)
+
+ # handle add-on installation
+ self.addon_manager = AddonManager(self.profile, restore=self.restore)
+ self.addon_manager.install_addons(self._addons, self._addon_manifests)
+
+ # handle webapps
+ self.webapps = WebappCollection(profile=self.profile, apps=self._apps)
+ self.webapps.update_manifests()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.cleanup()
+
+ def __del__(self):
+ self.cleanup()
+
+ # cleanup
+
+ def cleanup(self):
+ """Cleanup operations for the profile."""
+
+ if self.restore:
+ # If copies of those class instances exist ensure we correctly
+ # reset them all (see bug 934484)
+ self.clean_preferences()
+ if getattr(self, 'addon_manager', None) is not None:
+ self.addon_manager.clean()
+ if getattr(self, 'permissions', None) is not None:
+ self.permissions.clean_db()
+ if getattr(self, 'webapps', None) is not None:
+ self.webapps.clean()
+
+ # If it's a temporary profile we have to remove it
+ if self.create_new:
+ mozfile.remove(self.profile)
+
+ def reset(self):
+ """
+ reset the profile to the beginning state
+ """
+ self.cleanup()
+
+ self._internal_init()
+
+ def clean_preferences(self):
+ """Removed preferences added by mozrunner."""
+ for filename in self.written_prefs:
+ if not os.path.exists(os.path.join(self.profile, filename)):
+ # file has been deleted
+ break
+ while True:
+ if not self.pop_preferences(filename):
+ break
+
+ @classmethod
+ def clone(cls, path_from, path_to=None, **kwargs):
+ """Instantiate a temporary profile via cloning
+ - path_from: path of the profile to clone
+ - path_to: destination path; a temporary directory is used if not given
+ - kwargs: arguments to the profile constructor
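+
+ A minimal sketch (the source path is illustrative)::
+
+ cloned = Profile.clone('/path/to/reference/profile')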
+ """
+ if not path_to:
+ tempdir = tempfile.mkdtemp() # need an unused temp dir name
+ mozfile.remove(tempdir) # copytree requires that dest does not exist
+ path_to = tempdir
+ copytree(path_from, path_to)
+
+ c = cls(path_to, **kwargs)
+ c.create_new = True # deletes a cloned profile when restore is True
+ return c
+
+ def exists(self):
+ """returns whether the profile exists or not"""
+ return os.path.exists(self.profile)
+
+ # methods for preferences
+
+ def set_preferences(self, preferences, filename='user.js'):
+ """Adds preferences dict to profile preferences"""
+
+ # append to the file
+ prefs_file = os.path.join(self.profile, filename)
+ f = open(prefs_file, 'a')
+
+ if preferences:
+
+ # note what files we've touched
+ self.written_prefs.add(filename)
+
+ # opening delimeter
+ f.write('\n%s\n' % self.delimeters[0])
+
+ # write the preferences
+ Preferences.write(f, preferences)
+
+ # closing delimeter
+ f.write('%s\n' % self.delimeters[1])
+
+ f.close()
+
+ def set_persistent_preferences(self, preferences):
+ """
+ Adds preferences dict to profile preferences and save them during a
+ profile reset
+ """
+
+ # this is a dict sometimes, convert
+ if isinstance(preferences, dict):
+ preferences = preferences.items()
+
+ # add new prefs to preserve them during reset
+ for new_pref in preferences:
+ # if dupe remove item from original list
+ self._preferences = [
+ pref for pref in self._preferences if not new_pref[0] == pref[0]]
+ self._preferences.append(new_pref)
+
+ self.set_preferences(preferences, filename='user.js')
+
+ def pop_preferences(self, filename):
+ """
+ pop the last set of preferences added
+ returns True if popped
+ """
+
+ path = os.path.join(self.profile, filename)
+ with file(path) as f:
+ lines = f.read().splitlines()
+
+ def last_index(_list, value):
+ """
+ returns the last index of an item;
+ this should arguably be part of the standard library, but it isn't
+ """
+ for index in reversed(range(len(_list))):
+ if _list[index] == value:
+ return index
+ s = last_index(lines, self.delimeters[0])
+ e = last_index(lines, self.delimeters[1])
+
+ # ensure both markers are found
+ if s is None:
+ assert e is None, '%s found without %s' % (self.delimeters[1], self.delimeters[0])
+ return False # no preferences found
+ elif e is None:
+ assert s is None, '%s found without %s' % (self.delimeters[0], self.delimeters[1])
+
+ # ensure the markers are in the proper order
+ assert e > s, '%s found at %s, while %s found at %s' % (self.delimeters[1], e,
+ self.delimeters[0], s)
+
+ # write the prefs
+ cleaned_prefs = '\n'.join(lines[:s] + lines[e + 1:])
+ with file(path, 'w') as f:
+ f.write(cleaned_prefs)
+ return True
+
+ # methods for introspection
+
+ def summary(self, return_parts=False):
+ """
+ returns string summarizing profile information.
+ if return_parts is true, return the (Part_name, value) list
+ of tuples instead of the assembled string
+ """
+
+ parts = [('Path', self.profile)] # profile path
+
+ # directory tree
+ parts.append(('Files', '\n%s' % mozfile.tree(self.profile)))
+
+ # preferences
+ for prefs_file in ('user.js', 'prefs.js'):
+ path = os.path.join(self.profile, prefs_file)
+ if os.path.exists(path):
+
+ # prefs that get their own section
+ # This is currently only 'network.proxy.autoconfig_url'
+ # but could be expanded to include others
+ section_prefs = ['network.proxy.autoconfig_url']
+ line_length = 80
+ # buffer for 80 character display:
+ # length = 80 - len(key) - len(': ') - line_length_buffer
+ line_length_buffer = 10
+ line_length_buffer += len(': ')
+
+ def format_value(key, value):
+ if key not in section_prefs:
+ return value
+ max_length = line_length - len(key) - line_length_buffer
+ if len(value) > max_length:
+ value = '%s...' % value[:max_length]
+ return value
+
+ prefs = Preferences.read_prefs(path)
+ if prefs:
+ prefs = dict(prefs)
+ parts.append((prefs_file,
+ '\n%s' % ('\n'.join(
+ ['%s: %s' % (key, format_value(key, prefs[key]))
+ for key in sorted(prefs.keys())]))))
+
+ # Currently hardcoded to 'network.proxy.autoconfig_url'
+ # but could be generalized, possibly with a generalized (simple)
+ # JS-parser
+ network_proxy_autoconfig = prefs.get('network.proxy.autoconfig_url')
+ if network_proxy_autoconfig and network_proxy_autoconfig.strip():
+ network_proxy_autoconfig = network_proxy_autoconfig.strip()
+ lines = network_proxy_autoconfig.replace(';', ';\n').splitlines()
+ lines = [line.strip() for line in lines]
+ origins_string = 'var origins = ['
+ origins_end = '];'
+ if origins_string in lines[0]:
+ start = lines[0].find(origins_string)
+ end = lines[0].find(origins_end, start)
+ splitline = [lines[0][:start],
+ lines[0][start:start + len(origins_string) - 1],
+ ]
+ splitline.extend(lines[0][start + len(origins_string):end].replace(
+ ',', ',\n').splitlines())
+ splitline.append(lines[0][end:])
+ lines[0:1] = [i.strip() for i in splitline]
+ parts.append(('Network Proxy Autoconfig, %s' % (prefs_file),
+ '\n%s' % '\n'.join(lines)))
+
+ if return_parts:
+ return parts
+
+ retval = '%s\n' % ('\n\n'.join(['[%s]: %s' % (key, value)
+ for key, value in parts]))
+ return retval
+
+ __str__ = summary
+
+
+class FirefoxProfile(Profile):
+ """Specialized Profile subclass for Firefox"""
+
+ preferences = { # Don't automatically update the application
+ 'app.update.enabled': False,
+ # Don't restore the last open set of tabs if the browser has crashed
+ 'browser.sessionstore.resume_from_crash': False,
+ # Don't check for the default web browser during startup
+ 'browser.shell.checkDefaultBrowser': False,
+ # Don't warn on exit when multiple tabs are open
+ 'browser.tabs.warnOnClose': False,
+ # Don't warn when exiting the browser
+ 'browser.warnOnQuit': False,
+ # Don't send Firefox health reports to the production server
+ 'datareporting.healthreport.documentServerURI': 'http://%(server)s/healthreport/',
+ # Only install add-ons from the profile and the application scope
+ # Also ensure that those are not getting disabled.
+ # see: https://developer.mozilla.org/en/Installing_extensions
+ 'extensions.enabledScopes': 5,
+ 'extensions.autoDisableScopes': 10,
+ # Don't send the list of installed addons to AMO
+ 'extensions.getAddons.cache.enabled': False,
+ # Don't install distribution add-ons from the app folder
+ 'extensions.installDistroAddons': False,
+ # Don't run the add-on compatibility check during start-up
+ 'extensions.showMismatchUI': False,
+ # Don't automatically update add-ons
+ 'extensions.update.enabled': False,
+ # Don't open a dialog to show available add-on updates
+ 'extensions.update.notifyUser': False,
+ # Enable test mode to run multiple tests in parallel
+ 'focusmanager.testmode': True,
+ # Enable test mode to not raise an OS level dialog for location sharing
+ 'geo.provider.testing': True,
+ # Suppress delay for main action in popup notifications
+ 'security.notification_enable_delay': 0,
+ # Suppress automatic safe mode after crashes
+ 'toolkit.startup.max_resumed_crashes': -1,
+ # Don't report telemetry information
+ 'toolkit.telemetry.enabled': False,
+ # Don't send Telemetry reports to the production server. This is
+ # needed as Telemetry sends pings also if FHR upload is enabled.
+ 'toolkit.telemetry.server': 'http://%(server)s/telemetry-dummy/',
+ }
+
+
+class MetroFirefoxProfile(Profile):
+ """Specialized Profile subclass for Firefox Metro"""
+
+ preferences = { # Don't automatically update the application for desktop and metro build
+ 'app.update.enabled': False,
+ 'app.update.metro.enabled': False,
+ # Dismiss first run content overlay
+ 'browser.firstrun-content.dismissed': True,
+ # Don't restore the last open set of tabs if the browser has crashed
+ 'browser.sessionstore.resume_from_crash': False,
+ # Don't check for the default web browser during startup
+ 'browser.shell.checkDefaultBrowser': False,
+ # Don't send Firefox health reports to the production server
+ 'datareporting.healthreport.documentServerURI': 'http://%(server)s/healthreport/',
+ # Enable extensions
+ 'extensions.defaultProviders.enabled': True,
+ # Only install add-ons from the profile and the application scope
+ # Also ensure that those are not getting disabled.
+ # see: https://developer.mozilla.org/en/Installing_extensions
+ 'extensions.enabledScopes': 5,
+ 'extensions.autoDisableScopes': 10,
+ # Don't send the list of installed addons to AMO
+ 'extensions.getAddons.cache.enabled': False,
+ # Don't install distribution add-ons from the app folder
+ 'extensions.installDistroAddons': False,
+ # Don't run the add-on compatibility check during start-up
+ 'extensions.showMismatchUI': False,
+ # Disable strict compatibility checks to allow add-ons enabled by default
+ 'extensions.strictCompatibility': False,
+ # Don't automatically update add-ons
+ 'extensions.update.enabled': False,
+ # Don't open a dialog to show available add-on updates
+ 'extensions.update.notifyUser': False,
+ # Enable test mode to run multiple tests in parallel
+ 'focusmanager.testmode': True,
+ # Suppress delay for main action in popup notifications
+ 'security.notification_enable_delay': 0,
+ # Suppress automatic safe mode after crashes
+ 'toolkit.startup.max_resumed_crashes': -1,
+ # Don't report telemetry information
+ 'toolkit.telemetry.enabled': False,
+ # Don't send Telemetry reports to the production server. This is
+ # needed as Telemetry sends pings also if FHR upload is enabled.
+ 'toolkit.telemetry.server': 'http://%(server)s/telemetry-dummy/',
+ }
+
+
+class ThunderbirdProfile(Profile):
+ """Specialized Profile subclass for Thunderbird"""
+
+ preferences = {'extensions.update.enabled': False,
+ 'extensions.update.notifyUser': False,
+ 'browser.shell.checkDefaultBrowser': False,
+ 'browser.tabs.warnOnClose': False,
+ 'browser.warnOnQuit': False,
+ 'browser.sessionstore.resume_from_crash': False,
+ # prevents the 'new e-mail address' wizard on new profile
+ 'mail.provider.enabled': False,
+ }
diff --git a/testing/mozbase/mozprofile/mozprofile/view.py b/testing/mozbase/mozprofile/mozprofile/view.py
new file mode 100644
index 000000000..fcab85b0a
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/view.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+"""
+script to view mozilla profiles
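+
+Usage sketch (the profile path is illustrative)::
+
+ python mozprofile/view.py /path/to/profile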
+"""
+
+import mozprofile
+import optparse
+import os
+import sys
+
+__all__ = ['view_profile']
+
+
+def view_profile(args=sys.argv[1:]):
+
+ usage = '%prog [options] profile_path <...>'
+ parser = optparse.OptionParser(usage=usage, description=__doc__)
+ options, args = parser.parse_args(args)
+ if not args:
+ parser.print_usage()
+ parser.exit()
+
+ # check existence
+ missing = [i for i in args
+ if not os.path.exists(i)]
+ if missing:
+ if len(missing) > 1:
+ missing_string = "Profiles do not exist"
+ else:
+ missing_string = "Profile does not exist"
+ parser.error("%s: %s" % (missing_string, ', '.join(missing)))
+
+ # print summary for each profile
+ while args:
+ path = args.pop(0)
+ profile = mozprofile.Profile(path)
+ print profile.summary()
+ if args:
+ print '-' * 4
+
+if __name__ == '__main__':
+ view_profile()
diff --git a/testing/mozbase/mozprofile/mozprofile/webapps.py b/testing/mozbase/mozprofile/mozprofile/webapps.py
new file mode 100644
index 000000000..4daf9ef06
--- /dev/null
+++ b/testing/mozbase/mozprofile/mozprofile/webapps.py
@@ -0,0 +1,281 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Handles installing open webapps (https://developer.mozilla.org/en-US/docs/Apps)
+to a profile. A webapp object is a dict that contains some metadata about
+the webapp and must at least include a name, description and manifestURL.
+
+Each webapp has a manifest (https://developer.mozilla.org/en-US/docs/Apps/Manifest).
+Additionally there is a separate json manifest that keeps track of the installed
+webapps, their manifestURLs and their permissions.
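+
+An illustrative webapp object (all values are made up)::
+
+ {'name': 'Example App',
+ 'description': 'A demonstration webapp',
+ 'manifestURL': 'http://example.com/manifest.webapp'}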
+"""
+
+from string import Template
+import json
+import os
+import shutil
+
+import mozfile
+
+__all__ = ["Webapp", "WebappCollection", "WebappFormatException", "APP_STATUS_NOT_INSTALLED",
+ "APP_STATUS_INSTALLED", "APP_STATUS_PRIVILEGED", "APP_STATUS_CERTIFIED"]
+
+
+# from http://hg.mozilla.org/mozilla-central/file/add0b94c2c0b/caps/idl/nsIPrincipal.idl#l163
+APP_STATUS_NOT_INSTALLED = 0
+APP_STATUS_INSTALLED = 1
+APP_STATUS_PRIVILEGED = 2
+APP_STATUS_CERTIFIED = 3
+
+
+class WebappFormatException(Exception):
+ """thrown for invalid webapp objects"""
+
+
+class Webapp(dict):
+ """A webapp definition"""
+
+ required_keys = ('name', 'description', 'manifestURL')
+
+ def __init__(self, *args, **kwargs):
+ try:
+ dict.__init__(self, *args, **kwargs)
+ except (TypeError, ValueError):
+ raise WebappFormatException("Webapp object should be an instance of type 'dict'")
+ self.validate()
+
+ def __eq__(self, other):
+ """Webapps are considered equal if they have the same name"""
+ if not isinstance(other, self.__class__):
+ return False
+ return self['name'] == other['name']
+
+ def __ne__(self, other):
+ """Webapps are considered not equal if they have different names"""
+ return not self.__eq__(other)
+
+ def validate(self):
+ # TODO some keys are required if another key has a certain value
+ for key in self.required_keys:
+ if key not in self:
+ raise WebappFormatException("Webapp object missing required key '%s'" % key)
+
+
+class WebappCollection(object):
+ """A list-like object that collects webapps and updates the webapp manifests"""
+
+ json_template = Template(""""$name": {
+ "name": "$name",
+ "origin": "$origin",
+ "installOrigin": "$origin",
+ "receipt": null,
+ "installTime": 132333986000,
+ "manifestURL": "$manifestURL",
+ "localId": $localId,
+ "id": "$name",
+ "appStatus": $appStatus,
+ "csp": "$csp"
+}""")
+
+ manifest_template = Template("""{
+ "name": "$name",
+ "csp": "$csp",
+ "description": "$description",
+ "launch_path": "/",
+ "developer": {
+ "name": "Mozilla",
+ "url": "https://mozilla.org/"
+ },
+ "permissions": [
+ ],
+ "locales": {
+ "en-US": {
+ "name": "$name",
+ "description": "$description"
+ }
+ },
+ "default_locale": "en-US",
+ "icons": {
+ }
+}
+""")
+
+ def __init__(self, profile, apps=None, json_template=None, manifest_template=None):
+ """
+ :param profile: the file path to a profile
+ :param apps: [optional] a list of webapp objects or file paths to json files describing
+ webapps
+ :param json_template: [optional] string template describing the webapp json format
+ :param manifest_template: [optional] string template describing the webapp manifest format
+ """
+ if not isinstance(profile, basestring):
+ raise TypeError("Must provide path to a profile, received '%s'" % type(profile))
+ self.profile = profile
+ self.webapps_dir = os.path.join(self.profile, 'webapps')
+ self.backup_dir = os.path.join(self.profile, '.mozprofile_backup', 'webapps')
+
+ self._apps = []
+ self._installed_apps = []
+ if apps:
+ if not isinstance(apps, (list, set, tuple)):
+ apps = [apps]
+
+ for app in apps:
+ if isinstance(app, basestring) and os.path.isfile(app):
+ self.extend(self.read_json(app))
+ else:
+ self.append(app)
+
+ self.json_template = json_template or self.json_template
+ self.manifest_template = manifest_template or self.manifest_template
+
+ def __getitem__(self, index):
+ return self._apps.__getitem__(index)
+
+ def __setitem__(self, index, value):
+ return self._apps.__setitem__(index, Webapp(value))
+
+ def __delitem__(self, index):
+ return self._apps.__delitem__(index)
+
+ def __len__(self):
+ return self._apps.__len__()
+
+ def __contains__(self, value):
+ return self._apps.__contains__(Webapp(value))
+
+ def append(self, value):
+ return self._apps.append(Webapp(value))
+
+ def insert(self, index, value):
+ return self._apps.insert(index, Webapp(value))
+
+ def extend(self, values):
+ return self._apps.extend([Webapp(v) for v in values])
+
+ def remove(self, value):
+ return self._apps.remove(Webapp(value))
+
+ def _write_webapps_json(self, apps):
+ contents = []
+ for app in apps:
+ contents.append(self.json_template.substitute(app))
+ contents = '{\n' + ',\n'.join(contents) + '\n}\n'
+ webapps_json_path = os.path.join(self.webapps_dir, 'webapps.json')
+ webapps_json_file = open(webapps_json_path, "w")
+ webapps_json_file.write(contents)
+ webapps_json_file.close()
+
+ def _write_webapp_manifests(self, write_apps=[], remove_apps=[]):
+ # Write manifests for installed apps
+ for app in write_apps:
+ manifest_dir = os.path.join(self.webapps_dir, app['name'])
+ manifest_path = os.path.join(manifest_dir, 'manifest.webapp')
+ if not os.path.isfile(manifest_path):
+ if not os.path.isdir(manifest_dir):
+ os.mkdir(manifest_dir)
+ manifest = self.manifest_template.substitute(app)
+ manifest_file = open(manifest_path, "a")
+ manifest_file.write(manifest)
+ manifest_file.close()
+ # Remove manifests for removed apps
+ for app in remove_apps:
+ self._installed_apps.remove(app)
+ manifest_dir = os.path.join(self.webapps_dir, app['name'])
+ mozfile.remove(manifest_dir)
+
+ def update_manifests(self):
+ """Updates the webapp manifests with the webapps represented in this collection
+
+ If update_manifests is called again later, apps may have been added to or removed
+ from the collection in the interim; the manifests will be adjusted accordingly.
+ """
+ apps_to_install = [app for app in self._apps if app not in self._installed_apps]
+ apps_to_remove = [app for app in self._installed_apps if app not in self._apps]
+ if apps_to_install == apps_to_remove == []:
+ # nothing to do
+ return
+
+ if not os.path.isdir(self.webapps_dir):
+ os.makedirs(self.webapps_dir)
+ elif not self._installed_apps:
+ shutil.copytree(self.webapps_dir, self.backup_dir)
+
+ webapps_json_path = os.path.join(self.webapps_dir, 'webapps.json')
+ webapps_json = []
+ if os.path.isfile(webapps_json_path):
+ webapps_json = self.read_json(webapps_json_path, description="description")
+ webapps_json = [a for a in webapps_json if a not in apps_to_remove]
+
+ # Iterate over apps already in webapps.json to determine the starting local
+ # id and to ensure apps are properly formatted
+ start_id = 1
+ for local_id, app in enumerate(webapps_json):
+ app['localId'] = local_id + 1
+ start_id += 1
+ if not app.get('csp'):
+ app['csp'] = ''
+ if not app.get('appStatus'):
+ app['appStatus'] = 3
+
+ # Append apps_to_install to the pre-existent apps
+ for local_id, app in enumerate(apps_to_install):
+ app['localId'] = local_id + start_id
+ # ignore if it's already installed
+ if app in webapps_json:
+ start_id -= 1
+ continue
+ webapps_json.append(app)
+ self._installed_apps.append(app)
+
+ # Write the full contents to webapps.json
+ self._write_webapps_json(webapps_json)
+
+ # Create/remove manifest file for each app.
+ self._write_webapp_manifests(apps_to_install, apps_to_remove)
+
+ def clean(self):
+ """Remove all webapps that were installed and restore profile to previous state"""
+ if self._installed_apps:
+ mozfile.remove(self.webapps_dir)
+
+ if os.path.isdir(self.backup_dir):
+ shutil.copytree(self.backup_dir, self.webapps_dir)
+ mozfile.remove(self.backup_dir)
+
+ self._apps = []
+ self._installed_apps = []
+
+ @classmethod
+ def read_json(cls, path, **defaults):
+ """Reads a json file which describes a set of webapps. The json format is either a
+ dictionary where each key represents the name of a webapp (e.g B2G format) or a list
+ of webapp objects.
+
+ :param path: Path to a json file defining webapps
+ :param defaults: Default key-value pairs added to each webapp object if the key doesn't exist
+
+ Returns a list of Webapp objects
+ """
+ f = open(path, 'r')
+ app_json = json.load(f)
+ f.close()
+
+ apps = []
+ if isinstance(app_json, dict):
+ for k, v in app_json.iteritems():
+ v['name'] = k
+ apps.append(v)
+ else:
+ apps = app_json
+ if not isinstance(apps, list):
+ apps = [apps]
+
+ ret = []
+ for app in apps:
+ d = defaults.copy()
+ d.update(app)
+ ret.append(Webapp(**d))
+ return ret
diff --git a/testing/mozbase/mozprofile/setup.py b/testing/mozbase/mozprofile/setup.py
new file mode 100644
index 000000000..a88fe7053
--- /dev/null
+++ b/testing/mozbase/mozprofile/setup.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from setuptools import setup
+
+PACKAGE_NAME = 'mozprofile'
+PACKAGE_VERSION = '0.28'
+
+# we only support python 2 right now
+assert sys.version_info[0] == 2
+
+deps = ['mozfile >= 1.0', 'mozlog >= 3.0']
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Library to create and modify Mozilla application profiles",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=['Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL 2.0',
+ packages=['mozprofile'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ extras_require={'manifest': ['manifestparser >= 0.6']},
+ tests_require=['mozhttpd'],
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozprofile = mozprofile:cli
+ view-profile = mozprofile:view_profile
+ diff-profiles = mozprofile:diff_profiles
+ """, )
diff --git a/testing/mozbase/mozprofile/tests/addon_stubs.py b/testing/mozbase/mozprofile/tests/addon_stubs.py
new file mode 100644
index 000000000..f9602de46
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/addon_stubs.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+import os
+import tempfile
+import zipfile
+
+import mozfile
+
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+# stubs is a dict of the form {'addon id': 'install manifest filename'} (None = no manifest)
+stubs = {
+ 'test-addon-1@mozilla.org': 'test_addon_1.rdf',
+ 'test-addon-2@mozilla.org': 'test_addon_2.rdf',
+ 'test-addon-3@mozilla.org': 'test_addon_3.rdf',
+ 'test-addon-4@mozilla.org': 'test_addon_4.rdf',
+ 'test-addon-invalid-no-id@mozilla.org': 'test_addon_invalid_no_id.rdf',
+ 'test-addon-invalid-version@mozilla.org': 'test_addon_invalid_version.rdf',
+ 'test-addon-invalid-no-manifest@mozilla.org': None,
+ 'test-addon-invalid-not-wellformed@mozilla.org': 'test_addon_invalid_not_wellformed.rdf',
+ 'test-addon-unpack@mozilla.org': 'test_addon_unpack.rdf'}
+
+
+def generate_addon(addon_id, path=None, name=None, xpi=True):
+ """
+ Method to generate a single addon.
+
+ :param addon_id: id of an addon to generate from the stubs dictionary
+ :param path: path where addon and .xpi should be generated
+ :param name: name for the addon folder or .xpi file
+ :param xpi: Flag if an XPI or folder should be generated
+
+ Returns the file path of the addon's .xpi file, or the addon folder if xpi is False
+ """
+
+ if addon_id not in stubs:
+ raise IOError('Requested addon stub "%s" does not exist' % addon_id)
+
+ # Generate directory structure for addon
+ try:
+ tmpdir = path or tempfile.mkdtemp()
+ addon_dir = os.path.join(tmpdir, name or addon_id)
+ os.mkdir(addon_dir)
+ except IOError:
+ raise IOError('Could not generate directory structure for addon stub.')
+
+ # Write install.rdf for addon
+ if stubs[addon_id]:
+ install_rdf = os.path.join(addon_dir, 'install.rdf')
+ with open(install_rdf, 'w') as f:
+ manifest = os.path.join(here, 'install_manifests', stubs[addon_id])
+ f.write(open(manifest, 'r').read())
+
+ if not xpi:
+ return addon_dir
+
+ # Generate the .xpi for the addon
+ xpi_file = os.path.join(tmpdir, (name or addon_id) + '.xpi')
+ with zipfile.ZipFile(xpi_file, 'w') as x:
+ x.write(install_rdf, install_rdf[len(addon_dir):])
+
+ # Remove the temporary folder so that the addon does not get installed twice
+ mozfile.rmtree(addon_dir)
+
+ return xpi_file
+
+
+def generate_manifest(addon_list, path=None):
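+ """Generate a manifestparser-style manifest.ini listing the given addon stubs.
+
+ :param addon_list: addon ids (keys of the `stubs` dict) to generate
+ :param path: [optional] directory to generate into; a new temp dir otherwise
+
+ Returns the path to the generated manifest.ini
+ """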
+ tmpdir = path or tempfile.mkdtemp()
+ addons = [generate_addon(addon, path=tmpdir) for addon in addon_list]
+
+ manifest = os.path.join(tmpdir, 'manifest.ini')
+ with open(manifest, 'w') as f:
+ for addon in addons:
+ f.write('[' + addon + ']\n')
+
+ return manifest
diff --git a/testing/mozbase/mozprofile/tests/addonid.py b/testing/mozbase/mozprofile/tests/addonid.py
new file mode 100755
index 000000000..f76c5a913
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/addonid.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+
+import os
+import tempfile
+import unittest
+import shutil
+from mozprofile import addons
+
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class AddonIDTest(unittest.TestCase):
+ """ Test finding the addon id in a variety of install.rdf styles """
+
+ def make_install_rdf(self, filecontents):
+ path = tempfile.mkdtemp()
+ f = open(os.path.join(path, "install.rdf"), "w")
+ f.write(filecontents)
+ f.close()
+ return path
+
+ def test_addonID(self):
+ testlist = self.get_test_list()
+ for t in testlist:
+ try:
+ p = self.make_install_rdf(t)
+ a = addons.AddonManager(os.path.join(p, "profile"))
+ addon_id = a.addon_details(p)['id']
+ self.assertEqual(addon_id, "winning", "We got the addon id")
+ finally:
+ shutil.rmtree(p)
+
+ def test_addonID_xpi(self):
+ a = addons.AddonManager("profile")
+ addon = a.addon_details(os.path.join(here, "addons", "empty.xpi"))
+ self.assertEqual(addon['id'], "test-empty@quality.mozilla.org", "We got the addon id")
+
+ def get_test_list(self):
+ """ This just returns a hardcoded list of install.rdf snippets for testing.
+ When adding snippets for testing, remember that the id we're looking for
+ is "winning" (no quotes). So, make sure you have that id in your snippet
+ if you want it to pass.
+ """
+ tests = [
+ """<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>winning</em:id>
+ <em:name>MozMill</em:name>
+ <em:version>2.0a</em:version>
+ <em:creator>Adam Christian</em:creator>
+ <em:description>A testing extension based on the
+ Windmill Testing Framework client source</em:description>
+ <em:unpack>true</em:unpack>
+ <em:targetApplication>
+ <!-- Firefox -->
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5</em:minVersion>
+ <em:maxVersion>8.*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ <em:targetApplication>
+ <!-- Thunderbird -->
+ <Description>
+ <em:id>{3550f703-e582-4d05-9a08-453d09bdfdc6}</em:id>
+ <em:minVersion>3.0a1pre</em:minVersion>
+ <em:maxVersion>3.2*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ <em:targetApplication>
+ <!-- Sunbird -->
+ <Description>
+ <em:id>{718e30fb-e89b-41dd-9da7-e25a45638b28}</em:id>
+ <em:minVersion>0.6a1</em:minVersion>
+ <em:maxVersion>1.0pre</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ <em:targetApplication>
+ <!-- SeaMonkey -->
+ <Description>
+ <em:id>{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}</em:id>
+ <em:minVersion>2.0a1</em:minVersion>
+ <em:maxVersion>2.1*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ <em:targetApplication>
+ <!-- Songbird -->
+ <Description>
+ <em:id>songbird@songbirdnest.com</em:id>
+ <em:minVersion>0.3pre</em:minVersion>
+ <em:maxVersion>1.3.0a</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ <em:targetApplication>
+ <Description>
+ <em:id>toolkit@mozilla.org</em:id>
+ <em:minVersion>1.9.1</em:minVersion>
+ <em:maxVersion>2.0*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>""",
+ """<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:targetApplication>
+ <!-- Firefox -->
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5</em:minVersion>
+ <em:maxVersion>8.*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ <em:id>winning</em:id>
+ <em:name>MozMill</em:name>
+ <em:version>2.0a</em:version>
+ <em:creator>Adam Christian</em:creator>
+ <em:description>A testing extension based on the
+ Windmill Testing Framework client source</em:description>
+ <em:unpack>true</em:unpack>
+ </Description>
+ </RDF>""",
+ """<RDF xmlns="http://www.mozilla.org/2004/em-rdf#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+ <rdf:Description about="urn:mozilla:install-manifest">
+ <id>winning</id>
+ <name>foo</name>
+ <version>42</version>
+ <description>A testing extension based on the
+ Windmill Testing Framework client source</description>
+ </rdf:Description>
+</RDF>""",
+ """<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:foobar="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <foobar:targetApplication>
+ <!-- Firefox -->
+ <Description>
+ <foobar:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</foobar:id>
+ <foobar:minVersion>3.5</foobar:minVersion>
+ <foobar:maxVersion>8.*</foobar:maxVersion>
+ </Description>
+ </foobar:targetApplication>
+ <foobar:id>winning</foobar:id>
+ <foobar:name>MozMill</foobar:name>
+ <foobar:version>2.0a</foobar:version>
+ <foobar:creator>Adam Christian</foobar:creator>
+ <foobar:description>A testing extension based on the
+ Windmill Testing Framework client source</foobar:description>
+ <foobar:unpack>true</foobar:unpack>
+ </Description>
+ </RDF>""",
+ """<?xml version="1.0"?>
+<!--
+
+-->
+
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest"
+ em:id="winning"
+ em:name="Language Pack"
+ em:version="42.0a2"
+ em:type="8"
+ em:creator="Some Contributor">
+ <em:contributor></em:contributor>
+
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>42.0a2</em:minVersion>
+ <em:maxVersion>42.0a2</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
+"""]
+ return tests
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/addons/empty.xpi b/testing/mozbase/mozprofile/tests/addons/empty.xpi
new file mode 100644
index 000000000..26f28f099
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/addons/empty.xpi
Binary files differ
diff --git a/testing/mozbase/mozprofile/tests/addons/empty/install.rdf b/testing/mozbase/mozprofile/tests/addons/empty/install.rdf
new file mode 100644
index 000000000..70b9e13e4
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/addons/empty/install.rdf
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-empty@quality.mozilla.org</em:id>
+ <em:version>0.1</em:version>
+ <em:name>Test Extension (empty)</em:name>
+ <em:creator>Mozilla QA</em:creator>
+ <em:homepageURL>http://quality.mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/addons/invalid.xpi b/testing/mozbase/mozprofile/tests/addons/invalid.xpi
new file mode 100644
index 000000000..2f222c763
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/addons/invalid.xpi
Binary files differ
diff --git a/testing/mozbase/mozprofile/tests/bug758250.py b/testing/mozbase/mozprofile/tests/bug758250.py
new file mode 100755
index 000000000..f25901a19
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/bug758250.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+import mozprofile
+import os
+import shutil
+import tempfile
+import unittest
+
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class Bug758250(unittest.TestCase):
+ """
+ use of --profile in mozrunner just blows away addon sources:
+ https://bugzilla.mozilla.org/show_bug.cgi?id=758250
+ """
+
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp()
+ self.addon = os.path.join(here, 'addons', 'empty')
+
+ def tearDown(self):
+ # remove vestiges
+ shutil.rmtree(self.tmpdir)
+
+ def test_profile_addon_cleanup(self):
+
+ # sanity check: the empty addon should be here
+ self.assertTrue(os.path.exists(self.addon))
+ self.assertTrue(os.path.isdir(self.addon))
+ self.assertTrue(os.path.exists(os.path.join(self.addon, 'install.rdf')))
+
+ # because we are testing data loss, let's make sure we make a copy
+ shutil.rmtree(self.tmpdir)
+ shutil.copytree(self.addon, self.tmpdir)
+ self.assertTrue(os.path.exists(os.path.join(self.tmpdir, 'install.rdf')))
+
+ # make a starter profile
+ profile = mozprofile.FirefoxProfile()
+ path = profile.profile
+
+ # make a new profile based on the old
+ newprofile = mozprofile.FirefoxProfile(profile=path, addons=[self.tmpdir])
+ newprofile.cleanup()
+
+ # the source addon *should* still exist
+ self.assertTrue(os.path.exists(self.tmpdir))
+ self.assertTrue(os.path.exists(os.path.join(self.tmpdir, 'install.rdf')))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/bug785146.py b/testing/mozbase/mozprofile/tests/bug785146.py
new file mode 100755
index 000000000..2bbf4fb05
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/bug785146.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import os
+import shutil
+import sqlite3
+import tempfile
+import unittest
+from mozprofile.permissions import Permissions
+
+
+class PermissionsTest(unittest.TestCase):
+
+ locations = """http://mochi.test:8888 primary,privileged
+http://127.0.0.1:80 noxul
+http://127.0.0.1:8888 privileged
+"""
+
+ def setUp(self):
+ self.profile_dir = tempfile.mkdtemp()
+ self.locations_file = mozfile.NamedTemporaryFile()
+ self.locations_file.write(self.locations)
+ self.locations_file.flush()
+
+ def tearDown(self):
+ if self.profile_dir:
+ shutil.rmtree(self.profile_dir)
+ if self.locations_file:
+ self.locations_file.close()
+
+ def test_schema_version(self):
+ perms = Permissions(self.profile_dir, self.locations_file.name)
+ perms_db_filename = os.path.join(self.profile_dir, 'permissions.sqlite')
+ perms.write_db(self.locations_file)
+
+ stmt = 'PRAGMA user_version;'
+
+ con = sqlite3.connect(perms_db_filename)
+ cur = con.cursor()
+ cur.execute(stmt)
+ entries = cur.fetchall()
+
+ schema_version = entries[0][0]
+ self.assertEqual(schema_version, 5)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/files/not_an_addon.txt b/testing/mozbase/mozprofile/tests/files/not_an_addon.txt
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/files/not_an_addon.txt
diff --git a/testing/mozbase/mozprofile/tests/files/prefs_with_comments.js b/testing/mozbase/mozprofile/tests/files/prefs_with_comments.js
new file mode 100644
index 000000000..06a56f213
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/files/prefs_with_comments.js
@@ -0,0 +1,6 @@
+# A leading comment
+user_pref("browser.startup.homepage", "http://planet.mozilla.org"); # A trailing comment
+user_pref("zoom.minPercent", 30);
+// Another leading comment
+user_pref("zoom.maxPercent", 300); // Another trailing comment
+user_pref("webgl.verbose", "false");
diff --git a/testing/mozbase/mozprofile/tests/files/prefs_with_interpolation.js b/testing/mozbase/mozprofile/tests/files/prefs_with_interpolation.js
new file mode 100644
index 000000000..d0b30bf7b
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/files/prefs_with_interpolation.js
@@ -0,0 +1,4 @@
+user_pref("browser.foo", "http://{server}");
+user_pref("zoom.minPercent", 30);
+user_pref("webgl.verbose", "false");
+user_pref("browser.bar", "{abc}xyz");
diff --git a/testing/mozbase/mozprofile/tests/files/webapps1.json b/testing/mozbase/mozprofile/tests/files/webapps1.json
new file mode 100644
index 000000000..00220a3d1
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/files/webapps1.json
@@ -0,0 +1,50 @@
+[{ "name": "http_example_org",
+ "csp": "",
+ "origin": "http://example.org",
+ "manifestURL": "http://example.org/manifest.webapp",
+ "description": "http://example.org App",
+ "appStatus": 1
+ },
+ { "name": "https_example_com",
+ "csp": "",
+ "origin": "https://example.com",
+ "manifestURL": "https://example.com/manifest.webapp",
+ "description": "https://example.com App",
+ "appStatus": 1
+ },
+ { "name": "http_test1_example_org",
+ "csp": "",
+ "origin": "http://test1.example.org",
+ "manifestURL": "http://test1.example.org/manifest.webapp",
+ "description": "http://test1.example.org App",
+ "appStatus": 1
+ },
+ { "name": "http_test1_example_org_8000",
+ "csp": "",
+ "origin": "http://test1.example.org:8000",
+ "manifestURL": "http://test1.example.org:8000/manifest.webapp",
+ "description": "http://test1.example.org:8000 App",
+ "appStatus": 1
+ },
+ { "name": "http_sub1_test1_example_org",
+ "csp": "",
+ "origin": "http://sub1.test1.example.org",
+ "manifestURL": "http://sub1.test1.example.org/manifest.webapp",
+ "description": "http://sub1.test1.example.org App",
+ "appStatus": 1
+ },
+ { "name": "https_example_com_privileged",
+ "csp": "",
+ "origin": "https://example.com",
+ "manifestURL": "https://example.com/manifest_priv.webapp",
+ "description": "https://example.com Privileged App",
+ "appStatus": 2
+ },
+ { "name": "https_example_com_certified",
+ "csp": "",
+ "origin": "https://example.com",
+ "manifestURL": "https://example.com/manifest_cert.webapp",
+ "description": "https://example.com Certified App",
+ "appStatus": 3
+ }
+]
diff --git a/testing/mozbase/mozprofile/tests/files/webapps2.json b/testing/mozbase/mozprofile/tests/files/webapps2.json
new file mode 100644
index 000000000..03e84a041
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/files/webapps2.json
@@ -0,0 +1,37 @@
+{
+ "https_example_csp_certified": {
+ "csp": "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
+ "origin": "https://example.com",
+ "manifestURL": "https://example.com/manifest_csp_cert.webapp",
+ "description": "https://example.com certified app with manifest policy",
+ "appStatus": 3
+ },
+ "https_example_csp_installed": {
+ "csp": "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
+ "origin": "https://example.com",
+ "manifestURL": "https://example.com/manifest_csp_inst.webapp",
+ "description": "https://example.com installed app with manifest policy",
+ "appStatus": 1
+ },
+ "https_example_csp_privileged": {
+ "csp": "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
+ "origin": "https://example.com",
+ "manifestURL": "https://example.com/manifest_csp_priv.webapp",
+ "description": "https://example.com privileged app with manifest policy",
+ "appStatus": 2
+ },
+ "https_a_domain_certified": {
+ "csp": "",
+ "origin": "https://acertified.com",
+ "manifestURL": "https://acertified.com/manifest.webapp",
+ "description": "https://acertified.com certified app",
+ "appStatus": 3
+ },
+ "https_a_domain_privileged": {
+ "csp": "",
+ "origin": "https://aprivileged.com",
+ "manifestURL": "https://aprivileged.com/manifest.webapp",
+ "description": "https://aprivileged.com privileged app ",
+ "appStatus": 2
+ }
+}
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_1.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_1.rdf
new file mode 100644
index 000000000..839ea9fbd
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_1.rdf
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-addon-1@mozilla.org</em:id>
+ <em:version>0.1</em:version>
+ <em:name>Test Add-on 1</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_2.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_2.rdf
new file mode 100644
index 000000000..8303e862f
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_2.rdf
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-addon-2@mozilla.org</em:id>
+ <em:version>0.2</em:version>
+ <em:name>Test Add-on 2</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_3.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_3.rdf
new file mode 100644
index 000000000..5bd6d3804
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_3.rdf
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-addon-3@mozilla.org</em:id>
+ <em:version>0.1</em:version>
+ <em:name>Test Add-on 3</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
+
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_4.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_4.rdf
new file mode 100644
index 000000000..e0f99d313
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_4.rdf
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-addon-4@mozilla.org</em:id>
+ <em:version>0.1</em:version>
+ <em:name>Test Add-on 4</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
+
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_no_id.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_no_id.rdf
new file mode 100644
index 000000000..23f60fece
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_no_id.rdf
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <!-- Invalid because of a missing add-on id -->
+ <em:version>0.1</em:version>
+ <em:name>Test Invalid Extension (no id)</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <!-- Invalid target application string -->
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_not_wellformed.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_not_wellformed.rdf
new file mode 100644
index 000000000..690ec406c
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_not_wellformed.rdf
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <!-- Invalid because it's not well-formed -->
+ <em:id>test-addon-invalid-not-wellformed@mozilla.org</em:id
+ <em:version>0.1</em:version>
+ <em:name>Test Invalid Extension (no id)</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <!-- Invalid target application string -->
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_version.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_version.rdf
new file mode 100644
index 000000000..c854bfcdb
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_version.rdf
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-addon-invalid-version@mozilla.org</em:id>
+ <!-- Invalid addon version -->
+ <em:version>0.NOPE</em:version>
+ <em:name>Test Invalid Extension (invalid version)</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <!-- Invalid target application string -->
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/install_manifests/test_addon_unpack.rdf b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_unpack.rdf
new file mode 100644
index 000000000..cc85ea560
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/install_manifests/test_addon_unpack.rdf
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>test-addon-unpack@mozilla.org</em:id>
+ <em:version>0.1</em:version>
+ <em:name>Test Add-on (unpack)</em:name>
+ <em:creator>Mozilla</em:creator>
+ <em:homepageURL>http://mozilla.org</em:homepageURL>
+ <em:type>2</em:type>
+ <em:unpack>true</em:unpack>
+
+ <!-- Firefox -->
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>3.5.*</em:minVersion>
+ <em:maxVersion>*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/testing/mozbase/mozprofile/tests/manifest.ini b/testing/mozbase/mozprofile/tests/manifest.ini
new file mode 100644
index 000000000..3e5ea50d6
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/manifest.ini
@@ -0,0 +1,12 @@
+[addonid.py]
+[server_locations.py]
+[test_preferences.py]
+[permissions.py]
+[bug758250.py]
+[test_nonce.py]
+[bug785146.py]
+[test_clone_cleanup.py]
+[test_webapps.py]
+[test_profile.py]
+[test_profile_view.py]
+[test_addons.py]
diff --git a/testing/mozbase/mozprofile/tests/permissions.py b/testing/mozbase/mozprofile/tests/permissions.py
new file mode 100755
index 000000000..8889277af
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/permissions.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import os
+import shutil
+import sqlite3
+import tempfile
+import unittest
+from mozprofile.permissions import Permissions
+
+
+class PermissionsTest(unittest.TestCase):
+
+ locations = """http://mochi.test:8888 primary,privileged
+http://127.0.0.1:80 noxul
+http://127.0.0.1:8888 privileged
+"""
+
+ profile_dir = None
+ locations_file = None
+
+ def setUp(self):
+ self.profile_dir = tempfile.mkdtemp()
+ self.locations_file = mozfile.NamedTemporaryFile()
+ self.locations_file.write(self.locations)
+ self.locations_file.flush()
+
+ def tearDown(self):
+ if self.profile_dir:
+ shutil.rmtree(self.profile_dir)
+ if self.locations_file:
+ self.locations_file.close()
+
+ def write_perm_db(self, version=3):
+ permDB = sqlite3.connect(os.path.join(self.profile_dir, "permissions.sqlite"))
+ cursor = permDB.cursor()
+
+ cursor.execute("PRAGMA user_version=%d;" % version)
+
+ if version == 5:
+ cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
+ id INTEGER PRIMARY KEY,
+ origin TEXT,
+ type TEXT,
+ permission INTEGER,
+ expireType INTEGER,
+ expireTime INTEGER,
+ modificationTime INTEGER)""")
+ elif version == 4:
+ cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
+ id INTEGER PRIMARY KEY,
+ host TEXT,
+ type TEXT,
+ permission INTEGER,
+ expireType INTEGER,
+ expireTime INTEGER,
+ modificationTime INTEGER,
+ appId INTEGER,
+ isInBrowserElement INTEGER)""")
+ elif version == 3:
+ cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
+ id INTEGER PRIMARY KEY,
+ host TEXT,
+ type TEXT,
+ permission INTEGER,
+ expireType INTEGER,
+ expireTime INTEGER,
+ appId INTEGER,
+ isInBrowserElement INTEGER)""")
+ elif version == 2:
+ cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
+ id INTEGER PRIMARY KEY,
+ host TEXT,
+ type TEXT,
+ permission INTEGER,
+ expireType INTEGER,
+ expireTime INTEGER)""")
+ else:
+ raise Exception("version must be 2, 3, 4 or 5")
+
+ permDB.commit()
+ cursor.close()
+
+ def test_create_permissions_db(self):
+ perms = Permissions(self.profile_dir, self.locations_file.name)
+ perms_db_filename = os.path.join(self.profile_dir, 'permissions.sqlite')
+
+ select_stmt = 'select origin, type, permission from moz_hosts'
+
+ con = sqlite3.connect(perms_db_filename)
+ cur = con.cursor()
+ cur.execute(select_stmt)
+ entries = cur.fetchall()
+
+ self.assertEqual(len(entries), 3)
+
+ self.assertEqual(entries[0][0], 'http://mochi.test:8888')
+ self.assertEqual(entries[0][1], 'allowXULXBL')
+ self.assertEqual(entries[0][2], 1)
+
+ self.assertEqual(entries[1][0], 'http://127.0.0.1')
+ self.assertEqual(entries[1][1], 'allowXULXBL')
+ self.assertEqual(entries[1][2], 2)
+
+ self.assertEqual(entries[2][0], 'http://127.0.0.1:8888')
+ self.assertEqual(entries[2][1], 'allowXULXBL')
+ self.assertEqual(entries[2][2], 1)
+
+ perms._locations.add_host('a.b.c', port='8081', scheme='https', options='noxul')
+
+ cur.execute(select_stmt)
+ entries = cur.fetchall()
+
+ self.assertEqual(len(entries), 4)
+ self.assertEqual(entries[3][0], 'https://a.b.c:8081')
+ self.assertEqual(entries[3][1], 'allowXULXBL')
+ self.assertEqual(entries[3][2], 2)
+
+ # when creating a DB we should default to user_version==5
+ cur.execute('PRAGMA user_version')
+ entries = cur.fetchall()
+ self.assertEqual(entries[0][0], 5)
+
+ perms.clean_db()
+ # table should be removed
+ cur.execute("select * from sqlite_master where type='table'")
+ entries = cur.fetchall()
+ self.assertEqual(len(entries), 0)
+
+ def test_nw_prefs(self):
+ perms = Permissions(self.profile_dir, self.locations_file.name)
+
+ prefs, user_prefs = perms.network_prefs(False)
+
+ self.assertEqual(len(user_prefs), 0)
+ self.assertEqual(len(prefs), 0)
+
+ prefs, user_prefs = perms.network_prefs(True)
+ self.assertEqual(len(user_prefs), 2)
+ self.assertEqual(user_prefs[0], ('network.proxy.type', 2))
+ self.assertEqual(user_prefs[1][0], 'network.proxy.autoconfig_url')
+
+ origins_decl = "var knownOrigins = (function () { return ['http://mochi.test:8888', " \
+ "'http://127.0.0.1:80', 'http://127.0.0.1:8888'].reduce"
+ self.assertTrue(origins_decl in user_prefs[1][1])
+
+ proxy_check = ("'http': 'PROXY mochi.test:8888'",
+ "'https': 'PROXY mochi.test:4443'",
+ "'ws': 'PROXY mochi.test:4443'",
+ "'wss': 'PROXY mochi.test:4443'")
+ self.assertTrue(all(c in user_prefs[1][1] for c in proxy_check))
+
+ def verify_user_version(self, version):
+ """Verifies that we call INSERT statements using the correct number
+ of columns for existing databases.
+ """
+ self.write_perm_db(version=version)
+ Permissions(self.profile_dir, self.locations_file.name)
+ perms_db_filename = os.path.join(self.profile_dir, 'permissions.sqlite')
+
+ select_stmt = 'select * from moz_hosts'
+
+ con = sqlite3.connect(perms_db_filename)
+ cur = con.cursor()
+ cur.execute(select_stmt)
+ entries = cur.fetchall()
+
+ self.assertEqual(len(entries), 3)
+
+ columns = {
+ 1: 6,
+ 2: 6,
+ 3: 8,
+ 4: 9,
+ 5: 7,
+ }[version]
+
+ self.assertEqual(len(entries[0]), columns)
+ for x in range(4, columns):
+ self.assertEqual(entries[0][x], 0)
+
+ def test_existing_permissions_db_v2(self):
+ self.verify_user_version(2)
+
+ def test_existing_permissions_db_v3(self):
+ self.verify_user_version(3)
+
+ def test_existing_permissions_db_v4(self):
+ self.verify_user_version(4)
+
+ def test_existing_permissions_db_v5(self):
+ self.verify_user_version(5)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/server_locations.py b/testing/mozbase/mozprofile/tests/server_locations.py
new file mode 100644
index 000000000..5aa5c0f5e
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/server_locations.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import unittest
+from mozprofile.permissions import ServerLocations, \
+ MissingPrimaryLocationError, MultiplePrimaryLocationsError, \
+ DuplicateLocationError, BadPortLocationError, LocationsSyntaxError
+
+
+class ServerLocationsTest(unittest.TestCase):
+ """test server locations"""
+
+ locations = """# This is the primary location from which tests run.
+#
+http://mochi.test:8888 primary,privileged
+
+# a few test locations
+http://127.0.0.1:80 privileged
+http://127.0.0.1:8888 privileged
+https://test:80 privileged
+http://example.org:80 privileged
+http://test1.example.org privileged
+
+ """
+
+ locations_no_primary = """http://secondary.test:80 privileged
+http://tertiary.test:8888 privileged
+"""
+
+ locations_bad_port = """http://mochi.test:8888 primary,privileged
+http://127.0.0.1:80 privileged
+http://127.0.0.1:8888 privileged
+http://test:badport privileged
+http://example.org:80 privileged
+"""
+
+ def compare_location(self, location, scheme, host, port, options):
+ self.assertEqual(location.scheme, scheme)
+ self.assertEqual(location.host, host)
+ self.assertEqual(location.port, port)
+ self.assertEqual(location.options, options)
+
+ def create_temp_file(self, contents):
+ f = mozfile.NamedTemporaryFile()
+ f.write(contents)
+ f.flush()
+ return f
+
+ def test_server_locations(self):
+ # write a permissions file
+ f = self.create_temp_file(self.locations)
+
+ # read the locations
+ locations = ServerLocations(f.name)
+
+ # ensure that they're what we expect
+ self.assertEqual(len(locations), 6)
+ i = iter(locations)
+ self.compare_location(i.next(), 'http', 'mochi.test', '8888',
+ ['primary', 'privileged'])
+ self.compare_location(i.next(), 'http', '127.0.0.1', '80',
+ ['privileged'])
+ self.compare_location(i.next(), 'http', '127.0.0.1', '8888',
+ ['privileged'])
+ self.compare_location(i.next(), 'https', 'test', '80', ['privileged'])
+ self.compare_location(i.next(), 'http', 'example.org', '80',
+ ['privileged'])
+ self.compare_location(i.next(), 'http', 'test1.example.org', '8888',
+ ['privileged'])
+
+ locations.add_host('mozilla.org')
+ self.assertEqual(len(locations), 7)
+ self.compare_location(i.next(), 'http', 'mozilla.org', '80',
+ ['privileged'])
+
+ # test some errors
+ self.assertRaises(MultiplePrimaryLocationsError, locations.add_host,
+ 'primary.test', options='primary')
+
+ # We no longer raise DuplicateLocationError for duplicate hosts
+ try:
+ locations.add_host('127.0.0.1')
+ except DuplicateLocationError:
+ self.assertTrue(False, "Should no longer throw DuplicateLocationError")
+
+ self.assertRaises(BadPortLocationError, locations.add_host, '127.0.0.1',
+ port='abc')
+
+ # test some errors in locations file
+ f = self.create_temp_file(self.locations_no_primary)
+
+ exc = None
+ try:
+ ServerLocations(f.name)
+ except LocationsSyntaxError as e:
+ exc = e
+ self.assertNotEqual(exc, None)
+ self.assertEqual(exc.err.__class__, MissingPrimaryLocationError)
+ self.assertEqual(exc.lineno, 3)
+
+ # test bad port in a locations file to ensure lineno calculated
+ # properly.
+ f = self.create_temp_file(self.locations_bad_port)
+
+ exc = None
+ try:
+ ServerLocations(f.name)
+ except LocationsSyntaxError as e:
+ exc = e
+ self.assertNotEqual(exc, None)
+ self.assertEqual(exc.err.__class__, BadPortLocationError)
+ self.assertEqual(exc.lineno, 4)
+
+ def test_server_locations_callback(self):
+ class CallbackTest(object):
+ last_locations = None
+
+ def callback(self, locations):
+ self.last_locations = locations
+
+ c = CallbackTest()
+ f = self.create_temp_file(self.locations)
+ locations = ServerLocations(f.name, c.callback)
+
+ # callback should be for all locations in file
+ self.assertEqual(len(c.last_locations), 6)
+
+ # validate arbitrary one
+ self.compare_location(c.last_locations[2], 'http', '127.0.0.1', '8888',
+ ['privileged'])
+
+ locations.add_host('a.b.c')
+
+ # callback should be just for one location
+ self.assertEqual(len(c.last_locations), 1)
+ self.compare_location(c.last_locations[0], 'http', 'a.b.c', '80',
+ ['privileged'])
+
+ # read a second file, which should generate a callback with both
+ # locations.
+ f = self.create_temp_file(self.locations_no_primary)
+ locations.read(f.name)
+ self.assertEqual(len(c.last_locations), 2)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_addons.py b/testing/mozbase/mozprofile/tests/test_addons.py
new file mode 100644
index 000000000..93b930fea
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_addons.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+import unittest
+import urllib2
+
+from manifestparser import ManifestParser
+import mozfile
+import mozhttpd
+import mozlog.unstructured as mozlog
+import mozprofile
+
+from addon_stubs import generate_addon, generate_manifest
+
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestAddonsManager(unittest.TestCase):
+ """ Class to test mozprofile.addons.AddonManager """
+
+ def setUp(self):
+ self.logger = mozlog.getLogger('mozprofile.addons')
+ self.logger.setLevel(mozlog.ERROR)
+
+ self.profile = mozprofile.profile.Profile()
+ self.am = self.profile.addon_manager
+
+ self.profile_path = self.profile.profile
+ self.tmpdir = tempfile.mkdtemp()
+ self.addCleanup(mozfile.remove, self.tmpdir)
+
+ def test_install_addons_multiple_same_source(self):
+ # Generate installer stubs for all possible types of addons
+ addon_xpi = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir)
+ addon_folder = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir,
+ xpi=False)
+
+ # The same folder should not be installed twice
+ self.am.install_addons([addon_folder, addon_folder])
+ self.assertEqual(self.am.installed_addons, [addon_folder])
+ self.am.clean()
+
+ # The same XPI file should not be installed twice
+ self.am.install_addons([addon_xpi, addon_xpi])
+ self.assertEqual(self.am.installed_addons, [addon_xpi])
+ self.am.clean()
+
+ # Even with the same id, the add-on should be installed twice if it is
+ # specified as both an XPI file and a folder
+ self.am.install_addons([addon_folder, addon_xpi])
+ self.assertEqual(len(self.am.installed_addons), 2)
+ self.assertIn(addon_folder, self.am.installed_addons)
+ self.assertIn(addon_xpi, self.am.installed_addons)
+ self.am.clean()
+
+ def test_download(self):
+ server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
+ server.start()
+
+ # Download a valid add-on without a class instance to the general
+ # tmp folder, then clean up afterwards
+ try:
+ addon = server.get_url() + 'empty.xpi'
+ xpi_file = mozprofile.addons.AddonManager.download(addon)
+ self.assertTrue(os.path.isfile(xpi_file))
+ self.assertIn('test-empty@quality.mozilla.org.xpi',
+ os.path.basename(xpi_file))
+ self.assertNotIn(self.tmpdir, os.path.dirname(xpi_file))
+ finally:
+ # Given that the file is stored outside of the created tmp dir,
+ # we have to make sure to explicitly remove it
+ if os.path.isfile(xpi_file):
+ os.remove(xpi_file)
+
+ # Download a valid add-on to a specific folder
+ addon = server.get_url() + 'empty.xpi'
+ xpi_file = self.am.download(addon, self.tmpdir)
+ self.assertTrue(os.path.isfile(xpi_file))
+ self.assertIn('test-empty@quality.mozilla.org.xpi',
+ os.path.basename(xpi_file))
+ self.assertIn(self.tmpdir, os.path.dirname(xpi_file))
+ self.assertEqual(self.am.downloaded_addons, [])
+ os.remove(xpi_file)
+
+ # Download an invalid add-on to a special folder
+ addon = server.get_url() + 'invalid.xpi'
+ self.assertRaises(mozprofile.addons.AddonFormatError,
+ self.am.download, addon, self.tmpdir)
+ self.assertEqual(os.listdir(self.tmpdir), [])
+
+ # Download from an invalid URL
+ addon = server.get_url() + 'not_existent.xpi'
+ self.assertRaises(urllib2.HTTPError,
+ self.am.download, addon, self.tmpdir)
+ self.assertEqual(os.listdir(self.tmpdir), [])
+
+ # Attempt to download from a path string that is not a valid URL
+ addon = 'not_existent.xpi'
+ self.assertRaises(ValueError,
+ self.am.download, addon, self.tmpdir)
+ self.assertEqual(os.listdir(self.tmpdir), [])
+
+ server.stop()
+
+ def test_install_from_path_xpi(self):
+ addons_to_install = []
+ addons_installed = []
+
+ # Generate installer stubs and install them
+ for ext in ['test-addon-1@mozilla.org', 'test-addon-2@mozilla.org']:
+ temp_addon = generate_addon(ext, path=self.tmpdir)
+ addons_to_install.append(self.am.addon_details(temp_addon)['id'])
+ self.am.install_from_path(temp_addon)
+
+ # Generate a list of addons installed in the profile
+ addons_installed = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
+ self.profile.profile, 'extensions', 'staged'))]
+ self.assertEqual(sorted(addons_to_install), sorted(addons_installed))
+
+ def test_install_from_path_folder(self):
+ # Generate installer stubs for all possible types of addons
+ addons = []
+ addons.append(generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir))
+ addons.append(generate_addon('test-addon-2@mozilla.org',
+ path=self.tmpdir,
+ xpi=False))
+ addons.append(generate_addon('test-addon-3@mozilla.org',
+ path=self.tmpdir,
+ name='addon-3'))
+ addons.append(generate_addon('test-addon-4@mozilla.org',
+ path=self.tmpdir,
+ name='addon-4',
+ xpi=False))
+ addons.sort()
+
+ self.am.install_from_path(self.tmpdir)
+
+ self.assertEqual(self.am.installed_addons, addons)
+
+ def test_install_from_path_unpack(self):
+ # Generate installer stubs for all possible types of addons
+ addon_xpi = generate_addon('test-addon-unpack@mozilla.org',
+ path=self.tmpdir)
+ addon_folder = generate_addon('test-addon-unpack@mozilla.org',
+ path=self.tmpdir,
+ xpi=False)
+ addon_no_unpack = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir)
+
+ # Test unpack flag for add-on as XPI
+ self.am.install_from_path(addon_xpi)
+ self.assertEqual(self.am.installed_addons, [addon_xpi])
+ self.am.clean()
+
+ # Test unpack flag for add-on as folder
+ self.am.install_from_path(addon_folder)
+ self.assertEqual(self.am.installed_addons, [addon_folder])
+ self.am.clean()
+
+ # Test forcing unpack an add-on
+ self.am.install_from_path(addon_no_unpack, unpack=True)
+ self.assertEqual(self.am.installed_addons, [addon_no_unpack])
+ self.am.clean()
+
+ def test_install_from_path_url(self):
+ server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
+ server.start()
+
+ addon = server.get_url() + 'empty.xpi'
+ self.am.install_from_path(addon)
+
+ server.stop()
+
+ self.assertEqual(len(self.am.downloaded_addons), 1)
+ self.assertTrue(os.path.isfile(self.am.downloaded_addons[0]))
+ self.assertIn('test-empty@quality.mozilla.org.xpi',
+ os.path.basename(self.am.downloaded_addons[0]))
+
+ def test_install_from_path_after_reset(self):
+ # Installing the same add-on after a reset should not cause a failure
+ addon = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir, xpi=False)
+
+ # We cannot use self.am because profile.reset() creates a new instance
+ self.profile.addon_manager.install_from_path(addon)
+
+ self.profile.reset()
+
+ self.profile.addon_manager.install_from_path(addon)
+ self.assertEqual(self.profile.addon_manager.installed_addons, [addon])
+
+ def test_install_from_path_backup(self):
+ staged_path = os.path.join(self.profile_path, 'extensions', 'staged')
+
+ # Generate installer stubs for all possible types of addons
+ addon_xpi = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir)
+ addon_folder = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir,
+ xpi=False)
+ addon_name = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir,
+ name='test-addon-1-dupe@mozilla.org')
+
+ # Test backup of xpi files
+ self.am.install_from_path(addon_xpi)
+ self.assertIsNone(self.am.backup_dir)
+
+ self.am.install_from_path(addon_xpi)
+ self.assertIsNotNone(self.am.backup_dir)
+ self.assertEqual(os.listdir(self.am.backup_dir),
+ ['test-addon-1@mozilla.org.xpi'])
+
+ self.am.clean()
+ self.assertEqual(os.listdir(staged_path),
+ ['test-addon-1@mozilla.org.xpi'])
+ self.am.clean()
+
+ # Test backup of folders
+ self.am.install_from_path(addon_folder)
+ self.assertIsNone(self.am.backup_dir)
+
+ self.am.install_from_path(addon_folder)
+ self.assertIsNotNone(self.am.backup_dir)
+ self.assertEqual(os.listdir(self.am.backup_dir),
+ ['test-addon-1@mozilla.org'])
+
+ self.am.clean()
+ self.assertEqual(os.listdir(staged_path),
+ ['test-addon-1@mozilla.org'])
+ self.am.clean()
+
+ # Test backup of xpi files with another file name
+ self.am.install_from_path(addon_name)
+ self.assertIsNone(self.am.backup_dir)
+
+ self.am.install_from_path(addon_xpi)
+ self.assertIsNotNone(self.am.backup_dir)
+ self.assertEqual(os.listdir(self.am.backup_dir),
+ ['test-addon-1@mozilla.org.xpi'])
+
+ self.am.clean()
+ self.assertEqual(os.listdir(staged_path),
+ ['test-addon-1@mozilla.org.xpi'])
+ self.am.clean()
+
+ def test_install_from_path_invalid_addons(self):
+ # Generate installer stubs for all possible types of addons
+ addons = []
+ addons.append(generate_addon('test-addon-invalid-no-manifest@mozilla.org',
+ path=self.tmpdir,
+ xpi=False))
+ addons.append(generate_addon('test-addon-invalid-no-id@mozilla.org',
+ path=self.tmpdir))
+
+ self.am.install_from_path(self.tmpdir)
+
+ self.assertEqual(self.am.installed_addons, [])
+
+ @unittest.skip("Feature not implemented as part of AddonManger")
+ def test_install_from_path_error(self):
+ """ Check install_from_path raises an error with an invalid addon"""
+
+ temp_addon = generate_addon('test-addon-invalid-version@mozilla.org')
+ # This should raise an error here
+ self.am.install_from_path(temp_addon)
+
+ def test_install_from_manifest(self):
+ temp_manifest = generate_manifest(['test-addon-1@mozilla.org',
+ 'test-addon-2@mozilla.org'])
+ m = ManifestParser()
+ m.read(temp_manifest)
+ addons = m.get()
+
+ # Obtain details of addons to install from the manifest
+ addons_to_install = [self.am.addon_details(x['path']).get('id') for x in addons]
+
+ self.am.install_from_manifest(temp_manifest)
+ # Generate a list of addons installed in the profile
+ addons_installed = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
+ self.profile.profile, 'extensions', 'staged'))]
+ self.assertEqual(sorted(addons_installed), sorted(addons_to_install))
+
+ # Cleanup the temporary addon and manifest directories
+ mozfile.rmtree(os.path.dirname(temp_manifest))
+
+ def test_addon_details(self):
+ # Generate installer stubs for a valid and invalid add-on manifest
+ valid_addon = generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir)
+ invalid_addon = generate_addon('test-addon-invalid-not-wellformed@mozilla.org',
+ path=self.tmpdir)
+
+ # Check valid add-on
+ details = self.am.addon_details(valid_addon)
+ self.assertEqual(details['id'], 'test-addon-1@mozilla.org')
+ self.assertEqual(details['name'], 'Test Add-on 1')
+ self.assertEqual(details['unpack'], False)
+ self.assertEqual(details['version'], '0.1')
+
+ # Check invalid add-on
+ self.assertRaises(mozprofile.addons.AddonFormatError,
+ self.am.addon_details, invalid_addon)
+
+ # Check invalid path
+ self.assertRaises(IOError,
+ self.am.addon_details, '')
+
+ # Check invalid add-on format
+ addon_path = os.path.join(os.path.join(here, 'files'), 'not_an_addon.txt')
+ self.assertRaises(mozprofile.addons.AddonFormatError,
+ self.am.addon_details, addon_path)
+
+ @unittest.skip("Bug 900154")
+ def test_clean_addons(self):
+ addon_one = generate_addon('test-addon-1@mozilla.org')
+ addon_two = generate_addon('test-addon-2@mozilla.org')
+
+ self.am.install_addons(addon_one)
+ installed_addons = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
+ self.profile.profile, 'extensions', 'staged'))]
+
+ # Create a new profile based on an existing profile
+ # Install an extra addon in the new profile
+ # Cleanup addons
+ duplicate_profile = mozprofile.profile.Profile(profile=self.profile.profile,
+ addons=addon_two)
+ duplicate_profile.addon_manager.clean()
+
+ addons_after_cleanup = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
+ duplicate_profile.profile, 'extensions', 'staged'))]
+ # New addons installed should be removed by clean_addons()
+ self.assertEqual(installed_addons, addons_after_cleanup)
+
+ def test_noclean(self):
+ """test `restore=True/False` functionality"""
+
+ server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
+ server.start()
+
+ profile = tempfile.mkdtemp()
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ # empty initially
+ self.assertFalse(bool(os.listdir(profile)))
+
+ # make an addon
+ addons = []
+ addons.append(generate_addon('test-addon-1@mozilla.org',
+ path=tmpdir))
+ addons.append(server.get_url() + 'empty.xpi')
+
+ # install it with a restore=True AddonManager
+ am = mozprofile.addons.AddonManager(profile, restore=True)
+
+ for addon in addons:
+ am.install_from_path(addon)
+
+ # now it's there
+ self.assertEqual(os.listdir(profile), ['extensions'])
+ staging_folder = os.path.join(profile, 'extensions', 'staged')
+ self.assertTrue(os.path.exists(staging_folder))
+ self.assertEqual(len(os.listdir(staging_folder)), 2)
+
+ # del addons; now it's gone, though the directory tree still exists
+ downloaded_addons = am.downloaded_addons
+ del am
+
+ self.assertEqual(os.listdir(profile), ['extensions'])
+ self.assertTrue(os.path.exists(staging_folder))
+ self.assertEqual(os.listdir(staging_folder), [])
+
+ for addon in downloaded_addons:
+ self.assertFalse(os.path.isfile(addon))
+
+ finally:
+ mozfile.rmtree(tmpdir)
+ mozfile.rmtree(profile)
+
+ def test_remove_addon(self):
+ addons = []
+ addons.append(generate_addon('test-addon-1@mozilla.org',
+ path=self.tmpdir))
+ addons.append(generate_addon('test-addon-2@mozilla.org',
+ path=self.tmpdir))
+
+ self.am.install_from_path(self.tmpdir)
+
+ extensions_path = os.path.join(self.profile_path, 'extensions')
+ staging_path = os.path.join(extensions_path, 'staged')
+
+ # Fake a run by virtually installing one of the staged add-ons
+ shutil.move(os.path.join(staging_path, 'test-addon-1@mozilla.org.xpi'),
+ extensions_path)
+
+ for addon in self.am._addons:
+ self.am.remove_addon(addon)
+
+ self.assertEqual(os.listdir(staging_path), [])
+ self.assertEqual(os.listdir(extensions_path), ['staged'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_clone_cleanup.py b/testing/mozbase/mozprofile/tests/test_clone_cleanup.py
new file mode 100644
index 000000000..51c7ba03e
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_clone_cleanup.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import os
+import tempfile
+import unittest
+import mozfile
+
+from mozprofile.profile import Profile
+
+
+class CloneCleanupTest(unittest.TestCase):
+ """
+ test cleanup logic for the clone functionality
+ see https://bugzilla.mozilla.org/show_bug.cgi?id=642843
+ """
+
+ def setUp(self):
+ # make a profile with one preference
+ path = tempfile.mktemp()
+ self.addCleanup(mozfile.remove, path)
+ self.profile = Profile(path,
+ preferences={'foo': 'bar'},
+ restore=False)
+ user_js = os.path.join(self.profile.profile, 'user.js')
+ self.assertTrue(os.path.exists(user_js))
+
+ def test_restore_true(self):
+ # make a clone of this profile with restore=True
+ clone = Profile.clone(self.profile.profile, restore=True)
+ self.addCleanup(mozfile.remove, clone.profile)
+
+ clone.cleanup()
+
+ # clone should be deleted
+ self.assertFalse(os.path.exists(clone.profile))
+
+ def test_restore_false(self):
+ # make a clone of this profile with restore=False
+ clone = Profile.clone(self.profile.profile, restore=False)
+ self.addCleanup(mozfile.remove, clone.profile)
+
+ clone.cleanup()
+
+ # clone should still be around on the filesystem
+ self.assertTrue(os.path.exists(clone.profile))
+
+ def test_cleanup_on_garbage_collected(self):
+ clone = Profile.clone(self.profile.profile)
+ self.addCleanup(mozfile.remove, clone.profile)
+ profile_dir = clone.profile
+ self.assertTrue(os.path.exists(profile_dir))
+ del clone
+ # clone should be deleted
+ self.assertFalse(os.path.exists(profile_dir))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_nonce.py b/testing/mozbase/mozprofile/tests/test_nonce.py
new file mode 100755
index 000000000..fef262272
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_nonce.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+"""
+test nonce in prefs delimiters
+see https://bugzilla.mozilla.org/show_bug.cgi?id=722804
+"""
+
+import os
+import tempfile
+import unittest
+import mozfile
+from mozprofile.prefs import Preferences
+from mozprofile.profile import Profile
+
+
+class PreferencesNonceTest(unittest.TestCase):
+
+ def test_nonce(self):
+
+ # make a profile with one preference
+ path = tempfile.mktemp()
+ self.addCleanup(mozfile.remove, path)
+ profile = Profile(path,
+ preferences={'foo': 'bar'},
+ restore=False)
+ user_js = os.path.join(profile.profile, 'user.js')
+ self.assertTrue(os.path.exists(user_js))
+
+ # ensure the preference is correct
+ prefs = Preferences.read_prefs(user_js)
+ self.assertEqual(dict(prefs), {'foo': 'bar'})
+
+ del profile
+
+ # augment the profile with a second preference
+ profile = Profile(path,
+ preferences={'fleem': 'baz'},
+ restore=True)
+ prefs = Preferences.read_prefs(user_js)
+ self.assertEqual(dict(prefs), {'foo': 'bar', 'fleem': 'baz'})
+
+ # cleanup the profile;
+ # this should remove the new preferences but not the old
+ profile.cleanup()
+ prefs = Preferences.read_prefs(user_js)
+ self.assertEqual(dict(prefs), {'foo': 'bar'})
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_preferences.py b/testing/mozbase/mozprofile/tests/test_preferences.py
new file mode 100755
index 000000000..45d99c2e2
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_preferences.py
@@ -0,0 +1,378 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import mozhttpd
+import os
+import shutil
+import tempfile
+import unittest
+from mozprofile.cli import MozProfileCLI
+from mozprofile.prefs import Preferences
+from mozprofile.profile import Profile
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class PreferencesTest(unittest.TestCase):
+ """test mozprofile preference handling"""
+
+ # preferences from files/prefs_with_comments.js
+ _prefs_with_comments = {'browser.startup.homepage': 'http://planet.mozilla.org',
+ 'zoom.minPercent': 30,
+ 'zoom.maxPercent': 300,
+ 'webgl.verbose': 'false'}
+
+ def run_command(self, *args):
+ """
+ invokes mozprofile command line via the CLI factory
+ - args : command line arguments (equivalent of sys.argv[1:])
+ """
+
+ # instantiate the factory
+ cli = MozProfileCLI(list(args))
+
+ # create the profile
+ profile = cli.profile()
+
+ # return path to profile
+ return profile.profile
+
+ def compare_generated(self, _prefs, commandline):
+ """
+ writes out to a new profile with mozprofile command line
+ reads the generated preferences with prefs.py
+ compares the results
+ cleans up
+ """
+ profile = self.run_command(*commandline)
+ prefs_file = os.path.join(profile, 'user.js')
+ self.assertTrue(os.path.exists(prefs_file))
+ read = Preferences.read_prefs(prefs_file)
+ if isinstance(_prefs, dict):
+ read = dict(read)
+ self.assertEqual(_prefs, read)
+ shutil.rmtree(profile)
+
+ def test_basic_prefs(self):
+ """test setting a pref from the command line entry point"""
+
+ _prefs = {"browser.startup.homepage": "http://planet.mozilla.org/"}
+ commandline = []
+ _prefs = _prefs.items()
+ for pref, value in _prefs:
+ commandline += ["--pref", "%s:%s" % (pref, value)]
+ self.compare_generated(_prefs, commandline)
+
+ def test_ordered_prefs(self):
+ """ensure the prefs stay in the right order"""
+ _prefs = [("browser.startup.homepage", "http://planet.mozilla.org/"),
+ ("zoom.minPercent", 30),
+ ("zoom.maxPercent", 300),
+ ("webgl.verbose", 'false')]
+ commandline = []
+ for pref, value in _prefs:
+ commandline += ["--pref", "%s:%s" % (pref, value)]
+ _prefs = [(i, Preferences.cast(j)) for i, j in _prefs]
+ self.compare_generated(_prefs, commandline)
+
+ def test_ini(self):
+
+ # write the .ini file
+ _ini = """[DEFAULT]
+browser.startup.homepage = http://planet.mozilla.org/
+
+[foo]
+browser.startup.homepage = http://github.com/
+"""
+ try:
+ fd, name = tempfile.mkstemp(suffix='.ini')
+ os.write(fd, _ini)
+ os.close(fd)
+ commandline = ["--preferences", name]
+
+ # test the [DEFAULT] section
+ _prefs = {'browser.startup.homepage': 'http://planet.mozilla.org/'}
+ self.compare_generated(_prefs, commandline)
+
+ # test a specific section
+ _prefs = {'browser.startup.homepage': 'http://github.com/'}
+ commandline[-1] = commandline[-1] + ':foo'
+ self.compare_generated(_prefs, commandline)
+
+ finally:
+ # cleanup
+ os.remove(name)
+
+ def test_ini_keep_case(self):
+ """
+ Read a preferences config file with a preference in camel-case style.
+ Check that the read preference name has not been lower-cased
+ """
+ # write the .ini file
+ _ini = """[DEFAULT]
+general.warnOnAboutConfig = False
+"""
+ try:
+ fd, name = tempfile.mkstemp(suffix='.ini')
+ os.write(fd, _ini)
+ os.close(fd)
+ commandline = ["--preferences", name]
+
+ # test the [DEFAULT] section
+ _prefs = {'general.warnOnAboutConfig': 'False'}
+ self.compare_generated(_prefs, commandline)
+
+ finally:
+ # cleanup
+ os.remove(name)
+
+ def test_reset_should_remove_added_prefs(self):
+ """Check that when we call reset the items we expect are updated"""
+ profile = Profile()
+ prefs_file = os.path.join(profile.profile, 'user.js')
+
+ # we shouldn't have any initial preferences
+ initial_prefs = Preferences.read_prefs(prefs_file)
+ self.assertFalse(initial_prefs)
+ initial_prefs = file(prefs_file).read().strip()
+ self.assertFalse(initial_prefs)
+
+ # add some preferences
+ prefs1 = [("mr.t.quotes", "i aint getting on no plane!")]
+ profile.set_preferences(prefs1)
+ self.assertEqual(prefs1, Preferences.read_prefs(prefs_file))
+ lines = file(prefs_file).read().strip().splitlines()
+ self.assertTrue(any(line.startswith('#MozRunner Prefs Start') for line in lines))
+ self.assertTrue(any(line.startswith('#MozRunner Prefs End') for line in lines))
+
+ profile.reset()
+ self.assertNotEqual(prefs1,
+ Preferences.read_prefs(os.path.join(profile.profile, 'user.js')),
+ "I pity the fool who left my pref")
+
+ def test_reset_should_keep_user_added_prefs(self):
+ """Check that when we call reset the items we expect are updated"""
+ profile = Profile()
+ prefs_file = os.path.join(profile.profile, 'user.js')
+
+ # we shouldn't have any initial preferences
+ initial_prefs = Preferences.read_prefs(prefs_file)
+ self.assertFalse(initial_prefs)
+ initial_prefs = file(prefs_file).read().strip()
+ self.assertFalse(initial_prefs)
+
+ # add some preferences
+ prefs1 = [("mr.t.quotes", "i aint getting on no plane!")]
+ profile.set_persistent_preferences(prefs1)
+ self.assertEqual(prefs1, Preferences.read_prefs(prefs_file))
+ lines = file(prefs_file).read().strip().splitlines()
+ self.assertTrue(any(line.startswith('#MozRunner Prefs Start') for line in lines))
+ self.assertTrue(any(line.startswith('#MozRunner Prefs End') for line in lines))
+
+ profile.reset()
+ self.assertEqual(prefs1,
+ Preferences.read_prefs(os.path.join(profile.profile, 'user.js')),
+ "I pity the fool who left my pref")
+
+ def test_magic_markers(self):
+ """ensure our magic markers are working"""
+
+ profile = Profile()
+ prefs_file = os.path.join(profile.profile, 'user.js')
+
+ # we shouldn't have any initial preferences
+ initial_prefs = Preferences.read_prefs(prefs_file)
+ self.assertFalse(initial_prefs)
+ initial_prefs = file(prefs_file).read().strip()
+ self.assertFalse(initial_prefs)
+
+ # add some preferences
+ prefs1 = [("browser.startup.homepage", "http://planet.mozilla.org/"),
+ ("zoom.minPercent", 30)]
+ profile.set_preferences(prefs1)
+ self.assertEqual(prefs1, Preferences.read_prefs(prefs_file))
+ lines = file(prefs_file).read().strip().splitlines()
+ self.assertTrue(bool([line for line in lines
+ if line.startswith('#MozRunner Prefs Start')]))
+ self.assertTrue(bool([line for line in lines
+ if line.startswith('#MozRunner Prefs End')]))
+
+ # add some more preferences
+ prefs2 = [("zoom.maxPercent", 300),
+ ("webgl.verbose", 'false')]
+ profile.set_preferences(prefs2)
+ self.assertEqual(prefs1 + prefs2, Preferences.read_prefs(prefs_file))
+ lines = file(prefs_file).read().strip().splitlines()
+ self.assertTrue(len([line for line in lines
+ if line.startswith('#MozRunner Prefs Start')]) == 2)
+ self.assertTrue(len([line for line in lines
+ if line.startswith('#MozRunner Prefs End')]) == 2)
+
+ # now clean it up
+ profile.clean_preferences()
+ final_prefs = Preferences.read_prefs(prefs_file)
+ self.assertFalse(final_prefs)
+ lines = file(prefs_file).read().strip().splitlines()
+ self.assertTrue('#MozRunner Prefs Start' not in lines)
+ self.assertTrue('#MozRunner Prefs End' not in lines)
+
+ def test_preexisting_preferences(self):
+ """ensure you don't clobber preexisting preferences"""
+
+ # make a pretend profile
+ tempdir = tempfile.mkdtemp()
+
+ try:
+ # make a user.js
+ contents = """
+user_pref("webgl.enabled_for_all_sites", true);
+user_pref("webgl.force-enabled", true);
+"""
+ user_js = os.path.join(tempdir, 'user.js')
+ f = file(user_js, 'w')
+ f.write(contents)
+ f.close()
+
+ # make sure you can read it
+ prefs = Preferences.read_prefs(user_js)
+ original_prefs = [('webgl.enabled_for_all_sites', True), ('webgl.force-enabled', True)]
+ self.assertTrue(prefs == original_prefs)
+
+ # now read this as a profile
+ profile = Profile(tempdir, preferences={"browser.download.dir": "/home/jhammel"})
+
+ # make sure the new pref is now there
+ new_prefs = original_prefs[:] + [("browser.download.dir", "/home/jhammel")]
+ prefs = Preferences.read_prefs(user_js)
+ self.assertTrue(prefs == new_prefs)
+
+ # clean up the added preferences
+ profile.cleanup()
+ del profile
+
+ # make sure you have the original preferences
+ prefs = Preferences.read_prefs(user_js)
+ self.assertTrue(prefs == original_prefs)
+ finally:
+ shutil.rmtree(tempdir)
+
+ def test_can_read_prefs_with_multiline_comments(self):
+ """
+ Ensure that multiple comments in the file header do not break reading
+ the prefs (https://bugzilla.mozilla.org/show_bug.cgi?id=1233534).
+ """
+ user_js = tempfile.NamedTemporaryFile(suffix='.js', delete=False)
+ self.addCleanup(mozfile.remove, user_js.name)
+ with user_js:
+ user_js.write("""
+# Mozilla User Preferences
+
+/* Do not edit this file.
+ *
+ * If you make changes to this file while the application is running,
+ * the changes will be overwritten when the application exits.
+ *
+ * To make a manual change to preferences, you can visit the URL about:config
+ */
+
+user_pref("webgl.enabled_for_all_sites", true);
+user_pref("webgl.force-enabled", true);
+""")
+ self.assertEqual(
+ Preferences.read_prefs(user_js.name),
+ [('webgl.enabled_for_all_sites', True),
+ ('webgl.force-enabled', True)]
+ )
+
+ def test_json(self):
+ _prefs = {"browser.startup.homepage": "http://planet.mozilla.org/"}
+ json = '{"browser.startup.homepage": "http://planet.mozilla.org/"}'
+
+ # write the JSON string directly; we could use the json module, but it isn't needed here
+ with mozfile.NamedTemporaryFile(suffix='.json') as f:
+ f.write(json)
+ f.flush()
+
+ commandline = ["--preferences", f.name]
+ self.compare_generated(_prefs, commandline)
+
+ def test_prefs_write(self):
+ """test that the Preferences.write() method correctly serializes preferences"""
+
+ _prefs = {'browser.startup.homepage': "http://planet.mozilla.org",
+ 'zoom.minPercent': 30,
+ 'zoom.maxPercent': 300}
+
+ # make a Preferences manager with the testing preferences
+ preferences = Preferences(_prefs)
+
+ # write them to a temporary location
+ path = None
+ read_prefs = None
+ try:
+ with mozfile.NamedTemporaryFile(suffix='.js', delete=False) as f:
+ path = f.name
+ preferences.write(f, _prefs)
+
+ # read them back and ensure we get what we put in
+ read_prefs = dict(Preferences.read_prefs(path))
+
+ finally:
+ # cleanup
+ if path and os.path.exists(path):
+ os.remove(path)
+
+ self.assertEqual(read_prefs, _prefs)
+
+ def test_read_prefs_with_comments(self):
+ """test reading preferences from a prefs.js file that contains comments"""
+
+ path = os.path.join(here, 'files', 'prefs_with_comments.js')
+ self.assertEqual(dict(Preferences.read_prefs(path)), self._prefs_with_comments)
+
+ def test_read_prefs_with_interpolation(self):
+ """test reading preferences from a prefs.js file whose values
+ require interpolation"""
+
+ expected_prefs = {
+ "browser.foo": "http://server-name",
+ "zoom.minPercent": 30,
+ "webgl.verbose": "false",
+ "browser.bar": "somethingxyz"
+ }
+ values = {
+ "server": "server-name",
+ "abc": "something"
+ }
+ path = os.path.join(here, 'files', 'prefs_with_interpolation.js')
+ read_prefs = Preferences.read_prefs(path, interpolation=values)
+ self.assertEqual(dict(read_prefs), expected_prefs)
+
+ def test_read_prefs_ttw(self):
+ """test reading preferences through the web via mozhttpd"""
+
+ # create a MozHttpd instance
+ docroot = os.path.join(here, 'files')
+ host = '127.0.0.1'
+ port = 8888
+ httpd = mozhttpd.MozHttpd(host=host, port=port, docroot=docroot)
+
+ # create a preferences instance
+ prefs = Preferences()
+
+ try:
+ # start server
+ httpd.start(block=False)
+
+ # read preferences through the web
+ read = prefs.read_prefs('http://%s:%d/prefs_with_comments.js' % (host, port))
+ self.assertEqual(dict(read), self._prefs_with_comments)
+ finally:
+ httpd.stop()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_profile.py b/testing/mozbase/mozprofile/tests/test_profile.py
new file mode 100644
index 000000000..e24de1904
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_profile.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import os
+from mozprofile import Profile
+
+
+class TestProfile(unittest.TestCase):
+
+ def test_with_profile_should_cleanup(self):
+ with Profile() as profile:
+ self.assertTrue(os.path.exists(profile.profile))
+ # profile is cleaned
+ self.assertFalse(os.path.exists(profile.profile))
+
+ def test_with_profile_should_cleanup_even_on_exception(self):
+ with self.assertRaises(ZeroDivisionError):
+ with Profile() as profile:
+ self.assertTrue(os.path.exists(profile.profile))
+ 1 / 0 # will raise ZeroDivisionError
+ # profile is cleaned
+ self.assertFalse(os.path.exists(profile.profile))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_profile_view.py b/testing/mozbase/mozprofile/tests/test_profile_view.py
new file mode 100644
index 000000000..2e10a913b
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_profile_view.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import mozprofile
+import os
+import tempfile
+import unittest
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestProfilePrint(unittest.TestCase):
+
+ def test_profileprint(self):
+ """
+ test the summary function
+ """
+
+ keys = set(['Files', 'Path', 'user.js'])
+ ff_prefs = mozprofile.FirefoxProfile.preferences # shorthand
+ pref_string = '\n'.join(['%s: %s' % (key, ff_prefs[key])
+ for key in sorted(ff_prefs.keys())])
+
+ tempdir = tempfile.mkdtemp()
+ try:
+ profile = mozprofile.FirefoxProfile(tempdir)
+ parts = profile.summary(return_parts=True)
+ parts = dict(parts)
+
+ self.assertEqual(parts['Path'], tempdir)
+ self.assertEqual(set(parts.keys()), keys)
+ self.assertEqual(pref_string, parts['user.js'].strip())
+
+ finally:
+ mozfile.rmtree(tempdir)
+
+ def test_strcast(self):
+ """
+ test casting to a string
+ """
+
+ profile = mozprofile.Profile()
+ self.assertEqual(str(profile), profile.summary())
+
+ def test_profile_diff(self):
+ profile1 = mozprofile.Profile()
+ profile2 = mozprofile.Profile(preferences=dict(foo='bar'))
+
+ # diff a profile against itself; no difference
+ self.assertEqual([], mozprofile.diff(profile1, profile1))
+
+ # diff two profiles
+ diff = dict(mozprofile.diff(profile1, profile2))
+ self.assertEqual(diff.keys(), ['user.js'])
+ lines = [line.strip() for line in diff['user.js'].splitlines()]
+ self.assertTrue('+foo: bar' in lines)
+
+ # diff a blank vs FirefoxProfile
+ ff_profile = mozprofile.FirefoxProfile()
+ diff = dict(mozprofile.diff(profile2, ff_profile))
+ self.assertEqual(diff.keys(), ['user.js'])
+ lines = [line.strip() for line in diff['user.js'].splitlines()]
+ self.assertTrue('-foo: bar' in lines)
+ ff_pref_lines = ['+%s: %s' % (key, value)
+ for key, value in mozprofile.FirefoxProfile.preferences.items()]
+ self.assertTrue(set(ff_pref_lines).issubset(lines))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozprofile/tests/test_webapps.py b/testing/mozbase/mozprofile/tests/test_webapps.py
new file mode 100755
index 000000000..4db992d69
--- /dev/null
+++ b/testing/mozbase/mozprofile/tests/test_webapps.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+
+"""
+test installing and managing webapps in a profile
+"""
+
+import os
+import shutil
+import unittest
+from tempfile import mkdtemp
+
+from mozprofile.webapps import WebappCollection, Webapp, WebappFormatException
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class WebappTest(unittest.TestCase):
+ """Tests reading, installing and cleaning webapps
+ from a profile.
+ """
+ manifest_path_1 = os.path.join(here, 'files', 'webapps1.json')
+ manifest_path_2 = os.path.join(here, 'files', 'webapps2.json')
+
+ def setUp(self):
+ self.profile = mkdtemp(prefix='test_webapp')
+ self.webapps_dir = os.path.join(self.profile, 'webapps')
+ self.webapps_json_path = os.path.join(self.webapps_dir, 'webapps.json')
+
+ def tearDown(self):
+ shutil.rmtree(self.profile)
+
+ def test_read_json_manifest(self):
+ """Tests WebappCollection.read_json"""
+ # Parse a list of webapp objects and verify it worked
+ manifest_json_1 = WebappCollection.read_json(self.manifest_path_1)
+ self.assertEqual(len(manifest_json_1), 7)
+ for app in manifest_json_1:
+ self.assertIsInstance(app, Webapp)
+ for key in Webapp.required_keys:
+ self.assertIn(key, app)
+
+ # Parse a dictionary of webapp objects and verify it worked
+ manifest_json_2 = WebappCollection.read_json(self.manifest_path_2)
+ self.assertEqual(len(manifest_json_2), 5)
+ for app in manifest_json_2:
+ self.assertIsInstance(app, Webapp)
+ for key in Webapp.required_keys:
+ self.assertIn(key, app)
+
+ def test_invalid_webapp(self):
+ """Tests a webapp with a missing required key"""
+ webapps = WebappCollection(self.profile)
+ # Missing the required key "description", exception should be raised
+ self.assertRaises(WebappFormatException, webapps.append, {'name': 'foo'})
+
+ def test_webapp_collection(self):
+ """Tests the methods of the WebappCollection object"""
+ webapp_1 = {'name': 'test_app_1',
+ 'description': 'a description',
+ 'manifestURL': 'http://example.com/1/manifest.webapp',
+ 'appStatus': 1}
+
+ webapp_2 = {'name': 'test_app_2',
+ 'description': 'another description',
+ 'manifestURL': 'http://example.com/2/manifest.webapp',
+ 'appStatus': 2}
+
+ webapp_3 = {'name': 'test_app_2',
+ 'description': 'a third description',
+ 'manifestURL': 'http://example.com/3/manifest.webapp',
+ 'appStatus': 3}
+
+ webapps = WebappCollection(self.profile)
+ self.assertEqual(len(webapps), 0)
+
+ # WebappCollection should behave like a list
+ def invalid_index():
+ webapps[0]
+ self.assertRaises(IndexError, invalid_index)
+
+ # Append a webapp object
+ webapps.append(webapp_1)
+ self.assertTrue(len(webapps), 1)
+ self.assertIsInstance(webapps[0], Webapp)
+ self.assertEqual(len(webapps[0]), len(webapp_1))
+ self.assertEqual(len(set(webapps[0].items()) & set(webapp_1.items())), len(webapp_1))
+
+ # Remove a webapp object
+ webapps.remove(webapp_1)
+ self.assertEqual(len(webapps), 0)
+
+ # Extend a list of webapp objects
+ webapps.extend([webapp_1, webapp_2])
+ self.assertEqual(len(webapps), 2)
+ self.assertTrue(webapp_1 in webapps)
+ self.assertTrue(webapp_2 in webapps)
+ self.assertNotEquals(webapps[0], webapps[1])
+
+ # Insert a webapp object
+ webapps.insert(1, webapp_3)
+ self.assertEqual(len(webapps), 3)
+ self.assertEqual(webapps[1], webapps[2])
+ for app in webapps:
+ self.assertIsInstance(app, Webapp)
+
+ # Assigning a value that the dict() constructor cannot accept should throw
+ def invalid_type():
+ webapps[2] = 1
+ self.assertRaises(WebappFormatException, invalid_type)
+
+ def test_install_webapps(self):
+ """Test installing webapps into a profile that has no prior webapps"""
+ webapps = WebappCollection(self.profile, apps=self.manifest_path_1)
+ self.assertFalse(os.path.exists(self.webapps_dir))
+
+ # update the webapp manifests for the first time
+ webapps.update_manifests()
+ self.assertFalse(os.path.isdir(os.path.join(self.profile, webapps.backup_dir)))
+ self.assertTrue(os.path.isfile(self.webapps_json_path))
+
+ webapps_json = webapps.read_json(self.webapps_json_path, description="fake description")
+ self.assertEqual(len(webapps_json), 7)
+ for app in webapps_json:
+ self.assertIsInstance(app, Webapp)
+
+ manifest_json_1 = webapps.read_json(self.manifest_path_1)
+ manifest_json_2 = webapps.read_json(self.manifest_path_2)
+ self.assertEqual(len(webapps_json), len(manifest_json_1))
+ for app in webapps_json:
+ self.assertTrue(app in manifest_json_1)
+
+ # Remove one of the webapps from WebappCollection after it got installed
+ removed_app = manifest_json_1[2]
+ webapps.remove(removed_app)
+ # Add new webapps to the collection
+ webapps.extend(manifest_json_2)
+
+ # update the webapp manifests a second time
+ webapps.update_manifests()
+ self.assertFalse(os.path.isdir(os.path.join(self.profile, webapps.backup_dir)))
+ self.assertTrue(os.path.isfile(self.webapps_json_path))
+
+ webapps_json = webapps.read_json(self.webapps_json_path, description="a description")
+ self.assertEqual(len(webapps_json), 11)
+
+ # The new apps should be added
+ for app in webapps_json:
+ self.assertIsInstance(app, Webapp)
+ self.assertTrue(os.path.isfile(os.path.join(self.webapps_dir, app['name'],
+ 'manifest.webapp')))
+ # The removed app should not exist in the manifest
+ self.assertNotIn(removed_app, webapps_json)
+ self.assertFalse(os.path.exists(os.path.join(self.webapps_dir, removed_app['name'])))
+
+ # Cleaning should delete the webapps directory entirely
+ # since there was nothing there before
+ webapps.clean()
+ self.assertFalse(os.path.isdir(self.webapps_dir))
+
+ def test_install_webapps_preexisting(self):
+ """Tests installing webapps when the webapps directory already exists"""
+ manifest_json_2 = WebappCollection.read_json(self.manifest_path_2)
+
+ # Synthesize a pre-existing webapps directory
+ os.mkdir(self.webapps_dir)
+ shutil.copyfile(self.manifest_path_2, self.webapps_json_path)
+ for app in manifest_json_2:
+ app_path = os.path.join(self.webapps_dir, app['name'])
+ os.mkdir(app_path)
+ f = open(os.path.join(app_path, 'manifest.webapp'), 'w')
+ f.close()
+
+ webapps = WebappCollection(self.profile, apps=self.manifest_path_1)
+ self.assertTrue(os.path.exists(self.webapps_dir))
+
+ # update webapp manifests for the first time
+ webapps.update_manifests()
+ # A backup should be created
+ self.assertTrue(os.path.isdir(os.path.join(self.profile, webapps.backup_dir)))
+
+ # Both manifests should remain installed
+ webapps_json = webapps.read_json(self.webapps_json_path, description='a fake description')
+ self.assertEqual(len(webapps_json), 12)
+ for app in webapps_json:
+ self.assertIsInstance(app, Webapp)
+ self.assertTrue(os.path.isfile(os.path.join(self.webapps_dir, app['name'],
+ 'manifest.webapp')))
+
+ # Upon cleaning the backup should be restored
+ webapps.clean()
+ self.assertFalse(os.path.isdir(os.path.join(self.profile, webapps.backup_dir)))
+
+ # The original webapps should still be installed
+ webapps_json = webapps.read_json(self.webapps_json_path)
+ for app in webapps_json:
+ self.assertIsInstance(app, Webapp)
+ self.assertTrue(os.path.isfile(os.path.join(self.webapps_dir, app['name'],
+ 'manifest.webapp')))
+ self.assertEqual(webapps_json, manifest_json_2)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozrunner/mozrunner/__init__.py b/testing/mozbase/mozrunner/mozrunner/__init__.py
new file mode 100644
index 000000000..0fec5c238
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/__init__.py
@@ -0,0 +1,11 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+from .cli import *
+from .errors import *
+from .runners import *
+
+import base
+import devices
+import utils
diff --git a/testing/mozbase/mozrunner/mozrunner/application.py b/testing/mozbase/mozrunner/mozrunner/application.py
new file mode 100644
index 000000000..6734487ae
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/application.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from abc import ABCMeta, abstractmethod
+from distutils.spawn import find_executable
+import glob
+import os
+import posixpath
+
+from mozdevice import DeviceManagerADB, DMError, DroidADB
+from mozprofile import (
+ Profile,
+ FirefoxProfile,
+ MetroFirefoxProfile,
+ ThunderbirdProfile
+)
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_app_context(appname):
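+ # Map the application name to its context class; unsupported names raise KeyError.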
+ context_map = {'default': DefaultContext,
+ 'b2g': B2GContext,
+ 'firefox': FirefoxContext,
+ 'thunderbird': ThunderbirdContext,
+ 'metro': MetroContext,
+ 'fennec': FennecContext}
+ if appname not in context_map:
+ raise KeyError("Application '%s' not supported!" % appname)
+ return context_map[appname]
+
+
+class DefaultContext(object):
+ profile_class = Profile
+
+
+class RemoteContext(object):
+ __metaclass__ = ABCMeta
+ _dm = None
+ _remote_profile = None
+ _adb = None
+ profile_class = Profile
+ dm_class = DeviceManagerADB
+ _bindir = None
+ remote_test_root = ''
+ remote_process = None
+
+ @property
+ def bindir(self):
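+ # Lazily locate the directory containing the 'emulator' binary, if present.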
+ if self._bindir is None:
+ paths = [find_executable('emulator')]
+ paths = [p for p in paths if p is not None if os.path.isfile(p)]
+ if not paths:
+ self._bindir = ''
+ else:
+ self._bindir = os.path.dirname(paths[0])
+ return self._bindir
+
+ @property
+ def adb(self):
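+ # Resolve the adb binary from $ADB, $ADB_PATH or the PATH, in that order.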
+ if not self._adb:
+ paths = [os.environ.get('ADB'),
+ os.environ.get('ADB_PATH'),
+ self.which('adb')]
+ paths = [p for p in paths if p is not None if os.path.isfile(p)]
+ if not paths:
+ raise OSError(
+ 'Could not find the adb binary, make sure it is on your '
+ 'path or set the $ADB_PATH environment variable.')
+ self._adb = paths[0]
+ return self._adb
+
+ @property
+ def dm(self):
+ if not self._dm:
+ self._dm = self.dm_class(adbPath=self.adb, autoconnect=False)
+ return self._dm
+
+ @property
+ def remote_profile(self):
+ if not self._remote_profile:
+ self._remote_profile = posixpath.join(self.remote_test_root,
+ 'profile')
+ return self._remote_profile
+
+ def which(self, binary):
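+ # Make sure the context's bindir is searched first, then fall back to a
+ # regular PATH lookup via find_executable.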
+ paths = os.environ.get('PATH', '').split(os.pathsep)
+ if self.bindir is not None and os.path.abspath(self.bindir) not in paths:
+ paths.insert(0, os.path.abspath(self.bindir))
+ os.environ['PATH'] = os.pathsep.join(paths)
+
+ return find_executable(binary)
+
+ @abstractmethod
+ def stop_application(self):
+ """ Run (device manager) command to stop application. """
+ pass
+
+
+class FennecContext(RemoteContext):
+ _remote_profiles_ini = None
+ _remote_test_root = None
+
+ def __init__(self, app=None, adb_path=None, avd_home=None):
+ self._adb = adb_path
+ self.avd_home = avd_home
+ self.dm_class = DroidADB
+ self.remote_process = app or self.dm._packageName
+
+ def stop_application(self):
+ self.dm.stopApplication(self.remote_process)
+
+ @property
+ def remote_test_root(self):
+ if not self._remote_test_root:
+ self._remote_test_root = self.dm.getDeviceRoot()
+ return self._remote_test_root
+
+ @property
+ def remote_profiles_ini(self):
+ if not self._remote_profiles_ini:
+ self._remote_profiles_ini = posixpath.join(
+ self.dm.getAppRoot(self.remote_process),
+ 'files', 'mozilla', 'profiles.ini'
+ )
+ return self._remote_profiles_ini
+
+
+class B2GContext(RemoteContext):
+ _remote_settings_db = None
+
+ def __init__(self, b2g_home=None, adb_path=None):
+ self.homedir = b2g_home or os.environ.get('B2G_HOME')
+
+ if self.homedir is not None and not os.path.isdir(self.homedir):
+ raise OSError('Homedir \'%s\' does not exist!' % self.homedir)
+
+ self._adb = adb_path
+ self._update_tools = None
+ self._fastboot = None
+
+ self.remote_binary = '/system/bin/b2g.sh'
+ self.remote_bundles_dir = '/system/b2g/distribution/bundles'
+ self.remote_busybox = '/system/bin/busybox'
+ self.remote_process = '/system/b2g/b2g'
+ self.remote_profiles_ini = '/data/b2g/mozilla/profiles.ini'
+ self.remote_settings_json = '/system/b2g/defaults/settings.json'
+ self.remote_idb_dir = '/data/local/storage/permanent/chrome/idb'
+ self.remote_test_root = '/data/local/tests'
+ self.remote_webapps_dir = '/data/local/webapps'
+
+ self.remote_backup_files = [
+ self.remote_settings_json,
+ self.remote_webapps_dir,
+ ]
+
+ @property
+ def fastboot(self):
+ if self._fastboot is None:
+ self._fastboot = self.which('fastboot')
+ return self._fastboot
+
+ @property
+ def update_tools(self):
+ if self._update_tools is None and self.homedir is not None:
+ self._update_tools = os.path.join(self.homedir, 'tools', 'update-tools')
+ return self._update_tools
+
+ @property
+ def bindir(self):
+ if self._bindir is None and self.homedir is not None:
+ # TODO get this via build configuration
+ path = os.path.join(self.homedir, 'out', 'host', '*', 'bin')
+ paths = glob.glob(path)
+ if paths:
+ self._bindir = paths[0]
+ return self._bindir
+
+ @property
+ def remote_settings_db(self):
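+ # The settings database lives in the permanent chrome IndexedDB storage;
+ # locate it by its well-known '*ssegtnti.sqlite' suffix.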
+ if not self._remote_settings_db:
+ for filename in self.dm.listFiles(self.remote_idb_dir):
+ if filename.endswith('ssegtnti.sqlite'):
+ self._remote_settings_db = posixpath.join(self.remote_idb_dir, filename)
+ break
+ else:
+ raise DMError("Could not find settings db in '%s'!" % self.remote_idb_dir)
+ return self._remote_settings_db
+
+ def stop_application(self):
+ self.dm.shellCheckOutput(['stop', 'b2g'])
+
+ def setup_profile(self, profile):
+ # For some reason user.js in the profile doesn't get picked up.
+ # Manually copy it over to prefs.js. See bug 1009730 for more details.
+ self.dm.moveTree(posixpath.join(self.remote_profile, 'user.js'),
+ posixpath.join(self.remote_profile, 'prefs.js'))
+
+ if self.dm.fileExists(posixpath.join(self.remote_profile, 'settings.json')):
+ # On devices, settings.json is only read from the profile if
+ # the system location doesn't exist.
+ if self.dm.fileExists(self.remote_settings_json):
+ self.dm.removeFile(self.remote_settings_json)
+
+ # Delete existing settings db and create a new empty one to force new
+ # settings to be loaded.
+ self.dm.removeFile(self.remote_settings_db)
+ self.dm.shellCheckOutput(['touch', self.remote_settings_db])
+
+ # On devices, the webapps are located in /data/local/webapps instead of the profile.
+ # In some cases we may need to replace the existing webapps, in others we may just
+ # need to leave them in the profile. If the system app is present in the profile
+ # webapps, it's a good indication that they should replace the existing ones wholesale.
+ profile_webapps = posixpath.join(self.remote_profile, 'webapps')
+ if self.dm.dirExists(posixpath.join(profile_webapps, 'system.gaiamobile.org')):
+ self.dm.removeDir(self.remote_webapps_dir)
+ self.dm.moveTree(profile_webapps, self.remote_webapps_dir)
+
+ # On devices extensions are installed in the system dir
+ extension_dir = os.path.join(profile.profile, 'extensions', 'staged')
+ if os.path.isdir(extension_dir):
+ # Copy the extensions to the B2G bundles dir.
+ for filename in os.listdir(extension_dir):
+ path = posixpath.join(self.remote_bundles_dir, filename)
+ if self.dm.fileExists(path):
+ self.dm.removeFile(path)
+ self.dm.pushDir(extension_dir, self.remote_bundles_dir)
+
+ def cleanup_profile(self):
+ # Delete any bundled extensions
+ extension_dir = posixpath.join(self.remote_profile, 'extensions', 'staged')
+ if self.dm.dirExists(extension_dir):
+ for filename in self.dm.listFiles(extension_dir):
+ try:
+ self.dm.removeDir(posixpath.join(self.remote_bundles_dir, filename))
+ except DMError:
+ pass
+
+ if self.dm.fileExists(posixpath.join(self.remote_profile, 'settings.json')):
+ # Force settings.db to be restored to defaults
+ self.dm.removeFile(self.remote_settings_db)
+ self.dm.shellCheckOutput(['touch', self.remote_settings_db])
+
+
+class FirefoxContext(object):
+ profile_class = FirefoxProfile
+
+
+class ThunderbirdContext(object):
+ profile_class = ThunderbirdProfile
+
+
+class MetroContext(object):
+ profile_class = MetroFirefoxProfile
+
+ def __init__(self, binary=None):
+ self.binary = binary or os.environ.get('BROWSER_PATH', None)
+
+ def wrap_command(self, command):
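+ # Prepend the immersive test harness so the browser is launched in the
+ # Metro environment.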
+ immersive_helper_path = os.path.join(os.path.dirname(here),
+ 'resources',
+ 'metrotestharness.exe')
+ command[:0] = [immersive_helper_path, '-firefoxpath']
+ return command
diff --git a/testing/mozbase/mozrunner/mozrunner/base/__init__.py b/testing/mozbase/mozrunner/mozrunner/base/__init__.py
new file mode 100644
index 000000000..603d08f76
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/base/__init__.py
@@ -0,0 +1,5 @@
+from .runner import BaseRunner
+from .device import DeviceRunner, FennecRunner
+from .browser import GeckoRuntimeRunner
+
+__all__ = ['BaseRunner', 'DeviceRunner', 'FennecRunner', 'GeckoRuntimeRunner']
diff --git a/testing/mozbase/mozrunner/mozrunner/base/browser.py b/testing/mozbase/mozrunner/mozrunner/base/browser.py
new file mode 100644
index 000000000..998e4ccc5
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/base/browser.py
@@ -0,0 +1,80 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozinfo
+import os
+import sys
+
+from .runner import BaseRunner
+
+
+class GeckoRuntimeRunner(BaseRunner):
+ """
+ The base runner class used for local gecko runtime binaries,
+ such as Firefox and Thunderbird.
+ """
+
+ def __init__(self, binary, cmdargs=None, **runner_args):
+ self.show_crash_reporter = runner_args.pop('show_crash_reporter', False)
+ BaseRunner.__init__(self, **runner_args)
+
+ self.binary = binary
+ self.cmdargs = cmdargs or []
+
+ # allows you to run an instance of Firefox separately from any other instances
+ self.env['MOZ_NO_REMOTE'] = '1'
+ # keeps Firefox attached to the terminal window after it starts
+ self.env['NO_EM_RESTART'] = '1'
+
+ # Disable crash reporting dialogs that interfere with debugging
+ self.env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
+ self.env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
+
+ # set the library path if needed on linux
+ if sys.platform == 'linux2' and self.binary.endswith('-bin'):
+ dirname = os.path.dirname(self.binary)
+ if os.environ.get('LD_LIBRARY_PATH', None):
+ self.env['LD_LIBRARY_PATH'] = '%s:%s' % (os.environ['LD_LIBRARY_PATH'], dirname)
+ else:
+ self.env['LD_LIBRARY_PATH'] = dirname
+
+ @property
+ def command(self):
+ command = [self.binary, '-profile', self.profile.profile]
+
+ _cmdargs = [i for i in self.cmdargs
+ if i != '-foreground']
+ if len(_cmdargs) != len(self.cmdargs):
+ # foreground should be last; see
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=625614
+ self.cmdargs = _cmdargs
+ self.cmdargs.append('-foreground')
+ if mozinfo.isMac and '-foreground' not in self.cmdargs:
+ # runner should specify '-foreground' on Mac; see
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=916512
+ self.cmdargs.append('-foreground')
+
+ # Bug 775416 - Ensure that binary options are passed in first
+ command[1:1] = self.cmdargs
+
+ if hasattr(self.app_ctx, 'wrap_command'):
+ command = self.app_ctx.wrap_command(command)
+ return command
+
+ def start(self, *args, **kwargs):
+ # ensure the profile exists
+ if not self.profile.exists():
+ self.profile.reset()
+ assert self.profile.exists(), "%s : failure to reset profile" % self.__class__.__name__
+
+ has_debugger = "debug_args" in kwargs and kwargs["debug_args"]
+ if has_debugger:
+ self.env["MOZ_CRASHREPORTER_DISABLE"] = "1"
+ else:
+ if not self.show_crash_reporter:
+ # hide the crash reporter window
+ self.env["MOZ_CRASHREPORTER_NO_REPORT"] = "1"
+ self.env["MOZ_CRASHREPORTER"] = "1"
+
+ BaseRunner.start(self, *args, **kwargs)
diff --git a/testing/mozbase/mozrunner/mozrunner/base/device.py b/testing/mozbase/mozrunner/mozrunner/base/device.py
new file mode 100644
index 000000000..2252203d1
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/base/device.py
@@ -0,0 +1,185 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function
+
+import datetime
+import re
+import signal
+import sys
+import tempfile
+import time
+
+import mozfile
+
+from .runner import BaseRunner
+from ..devices import BaseEmulator
+
+
+class DeviceRunner(BaseRunner):
+ """
+ The base runner class used for running gecko on
+ remote devices (or emulators), such as B2G.
+ """
+ env = {'MOZ_CRASHREPORTER': '1',
+ 'MOZ_CRASHREPORTER_NO_REPORT': '1',
+ 'MOZ_CRASHREPORTER_SHUTDOWN': '1',
+ 'MOZ_HIDE_RESULTS_TABLE': '1',
+ 'MOZ_LOG': 'signaling:3,mtransport:4,DataChannel:4,jsep:4,MediaPipelineFactory:4',
+ 'R_LOG_LEVEL': '6',
+ 'R_LOG_DESTINATION': 'stderr',
+ 'R_LOG_VERBOSE': '1',
+ 'NO_EM_RESTART': '1', }
+
+ def __init__(self, device_class, device_args=None, **kwargs):
+ process_log = tempfile.NamedTemporaryFile(suffix='pidlog')
+ # the env will be passed to the device, it is not a *real* env
+ self._device_env = dict(DeviceRunner.env)
+ self._device_env['MOZ_PROCESS_LOG'] = process_log.name
+ # be sure we do not pass env to the parent class ctor
+ env = kwargs.pop('env', None)
+ if env:
+ self._device_env.update(env)
+
+ process_args = {'stream': sys.stdout,
+ 'processOutputLine': self.on_output,
+ 'onFinish': self.on_finish,
+ 'onTimeout': self.on_timeout}
+ process_args.update(kwargs.get('process_args') or {})
+
+ kwargs['process_args'] = process_args
+ BaseRunner.__init__(self, **kwargs)
+
+ device_args = device_args or {}
+ self.device = device_class(**device_args)
+
+ @property
+ def command(self):
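+ # Build an 'adb [-s <serial>] shell <env assignments> <binary>' command
+ # line that launches the remote process with the device environment set.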
+ cmd = [self.app_ctx.adb]
+ if self.app_ctx.dm._deviceSerial:
+ cmd.extend(['-s', self.app_ctx.dm._deviceSerial])
+ cmd.append('shell')
+ for k, v in self._device_env.iteritems():
+ cmd.append('%s=%s' % (k, v))
+ cmd.append(self.app_ctx.remote_binary)
+ return cmd
+
+ def start(self, *args, **kwargs):
+ if isinstance(self.device, BaseEmulator) and not self.device.connected:
+ self.device.start()
+ self.device.connect()
+ self.device.setup_profile(self.profile)
+
+ # TODO: this doesn't work well when the device is running but dropped
+ # wifi for some reason. It would be good to probe the state of the device
+ # to see if we have the homescreen running, or something, before waiting here
+ self.device.wait_for_net()
+
+ if not self.device.wait_for_net():
+ raise Exception("Network did not come up when starting device")
+
+ pid = BaseRunner.start(self, *args, **kwargs)
+
+ timeout = 10 # seconds
+ starttime = datetime.datetime.now()
+ while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout):
+ if self.is_running():
+ break
+ time.sleep(1)
+ else:
+ print("timed out waiting for '%s' process to start" % self.app_ctx.remote_process)
+
+ if not self.device.wait_for_net():
+ raise Exception("Failed to get a network connection")
+ return pid
+
+ def stop(self, sig=None):
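+ # Kill the remote process (optionally with a signal) and then stop the
+ # application itself so the system does not restart it.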
+ def _wait_for_shutdown(pid, timeout=10):
+ start_time = datetime.datetime.now()
+ end_time = datetime.timedelta(seconds=timeout)
+ while datetime.datetime.now() - start_time < end_time:
+ if self.is_running() != pid:
+ return True
+ time.sleep(1)
+ return False
+
+ remote_pid = self.is_running()
+ if remote_pid:
+ self.app_ctx.dm.killProcess(
+ self.app_ctx.remote_process, sig=sig)
+ if not _wait_for_shutdown(remote_pid) and sig is not None:
+ print("timed out waiting for '%s' process to exit, trying "
+ "without signal {}".format(
+ self.app_ctx.remote_process, sig))
+
+ # need to call adb stop otherwise the system will attempt to
+ # restart the process
+ remote_pid = self.is_running() or remote_pid
+ self.app_ctx.stop_application()
+ if not _wait_for_shutdown(remote_pid):
+ print("timed out waiting for '%s' process to exit".format(
+ self.app_ctx.remote_process))
+
+ def is_running(self):
+ return self.app_ctx.dm.processExist(self.app_ctx.remote_process)
+
+ def on_output(self, line):
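+ # Remember the most recent TEST-START entry so failures can be attributed
+ # to the right test.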
+ match = re.findall(r"TEST-START \| ([^\s]*)", line)
+ if match:
+ self.last_test = match[-1]
+
+ def on_timeout(self):
+ self.stop(sig=signal.SIGABRT)
+ msg = "DeviceRunner TEST-UNEXPECTED-FAIL | %s | application timed out after %s seconds"
+ if self.timeout:
+ timeout = self.timeout
+ else:
+ timeout = self.output_timeout
+ msg = "%s with no output" % msg
+
+ print(msg % (self.last_test, timeout))
+ self.check_for_crashes()
+
+ def on_finish(self):
+ self.check_for_crashes()
+
+ def check_for_crashes(self, dump_save_path=None, test_name=None, **kwargs):
+ test_name = test_name or self.last_test
+ dump_dir = self.device.pull_minidumps()
+ crashed = BaseRunner.check_for_crashes(
+ self,
+ dump_directory=dump_dir,
+ dump_save_path=dump_save_path,
+ test_name=test_name,
+ **kwargs)
+ mozfile.remove(dump_dir)
+ return crashed
+
+ def cleanup(self, *args, **kwargs):
+ BaseRunner.cleanup(self, *args, **kwargs)
+ self.device.cleanup()
+
+
+class FennecRunner(DeviceRunner):
+
+ def __init__(self, cmdargs=None, **kwargs):
+ super(FennecRunner, self).__init__(**kwargs)
+ self.cmdargs = cmdargs or []
+
+ @property
+ def command(self):
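+ # Launch Fennec through 'am start', passing the profile, command-line
+ # arguments and device environment as intent extras.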
+ cmd = [self.app_ctx.adb]
+ if self.app_ctx.dm._deviceSerial:
+ cmd.extend(["-s", self.app_ctx.dm._deviceSerial])
+ cmd.append("shell")
+ app = "%s/org.mozilla.gecko.BrowserApp" % self.app_ctx.remote_process
+ am_subcommand = ["am", "start", "-a", "android.activity.MAIN", "-n", app]
+ app_params = ["-no-remote", "-profile", self.app_ctx.remote_profile]
+ app_params.extend(self.cmdargs)
+ am_subcommand.extend(["--es", "args", "'%s'" % " ".join(app_params)])
+ # Append env variables in the form |--es env0 MOZ_CRASHREPORTER=1|
+ for (count, (k, v)) in enumerate(self._device_env.iteritems()):
+ am_subcommand.extend(["--es", "env%d" % count, "%s=%s" % (k, v)])
+ cmd.append("%s" % " ".join(am_subcommand))
+ return cmd
diff --git a/testing/mozbase/mozrunner/mozrunner/base/runner.py b/testing/mozbase/mozrunner/mozrunner/base/runner.py
new file mode 100644
index 000000000..98628f6f3
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/base/runner.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from abc import ABCMeta, abstractproperty
+import os
+import subprocess
+import traceback
+
+from mozlog import get_default_logger
+from mozprocess import ProcessHandler
+try:
+ import mozcrash
+except ImportError:
+ mozcrash = None
+
+from ..application import DefaultContext
+from ..errors import RunnerNotStartedError
+
+
+class BaseRunner(object):
+ """
+ The base runner class for all mozrunner objects, both local and remote.
+ """
+ __metaclass__ = ABCMeta
+ last_test = 'mozrunner-startup'
+ process_handler = None
+ timeout = None
+ output_timeout = None
+
+ def __init__(self, app_ctx=None, profile=None, clean_profile=True, env=None,
+ process_class=None, process_args=None, symbols_path=None,
+ dump_save_path=None, addons=None):
+ self.app_ctx = app_ctx or DefaultContext()
+
+ if isinstance(profile, basestring):
+ self.profile = self.app_ctx.profile_class(profile=profile,
+ addons=addons)
+ else:
+ self.profile = profile or self.app_ctx.profile_class(**getattr(self.app_ctx,
+ 'profile_args', {}))
+
+ self.logger = get_default_logger()
+
+ # process environment
+ if env is None:
+ self.env = os.environ.copy()
+ else:
+ self.env = env.copy()
+
+ self.clean_profile = clean_profile
+ self.process_class = process_class or ProcessHandler
+ self.process_args = process_args or {}
+ self.symbols_path = symbols_path
+ self.dump_save_path = dump_save_path
+
+ self.crashed = 0
+
+ def __del__(self):
+ self.cleanup()
+
+ @abstractproperty
+ def command(self):
+ """Returns the command list to run."""
+ pass
+
+ @property
+ def returncode(self):
+ """
+ The returncode of the process_handler. A value of None
+ indicates the process is still running. A negative
+ value indicates the process was killed with the
+ specified signal.
+
+ :raises: RunnerNotStartedError
+ """
+ if self.process_handler:
+ return self.process_handler.poll()
+ else:
+ raise RunnerNotStartedError("returncode accessed before runner started")
+
+ def start(self, debug_args=None, interactive=False, timeout=None, outputTimeout=None):
+ """
+ Run self.command in the proper environment.
+
+ :param debug_args: arguments for a debugger
+ :param interactive: uses subprocess.Popen directly
+ :param timeout: see process_handler.run()
+ :param outputTimeout: see process_handler.run()
+ :returns: the process id
+ """
+ self.timeout = timeout
+ self.output_timeout = outputTimeout
+ cmd = self.command
+
+ # ensure the runner is stopped
+ self.stop()
+
+ # attach a debugger, if specified
+ if debug_args:
+ cmd = list(debug_args) + cmd
+
+ if self.logger:
+ self.logger.info('Application command: %s' % ' '.join(cmd))
+ if interactive:
+ self.process_handler = subprocess.Popen(cmd, env=self.env)
+ # TODO: other arguments
+ else:
+ # this run uses the managed processhandler
+ self.process_handler = self.process_class(cmd, env=self.env, **self.process_args)
+ self.process_handler.run(self.timeout, self.output_timeout)
+
+ self.crashed = 0
+ return self.process_handler.pid
+
+ def wait(self, timeout=None):
+ """
+ Wait for the process to exit.
+
+ :param timeout: if not None, will return after timeout seconds.
+ Timeout is ignored if interactive was set to True.
+ :returns: the process return code if process exited normally,
+ -<signal> if process was killed (Unix only),
+ None if timeout was reached and the process is still running.
+ :raises: RunnerNotStartedError
+ """
+ if self.is_running():
+ # The interactive mode directly uses a Popen process instance. Its
+ # wait() method doesn't accept any parameters, so handle it separately.
+ if isinstance(self.process_handler, subprocess.Popen):
+ self.process_handler.wait()
+ else:
+ self.process_handler.wait(timeout)
+
+ elif not self.process_handler:
+ raise RunnerNotStartedError("Wait() called before process started")
+
+ return self.returncode
+
+ def is_running(self):
+ """
+ Checks if the process is running.
+
+ :returns: True if the process is active
+ """
+ return self.returncode is None
+
+ def stop(self, sig=None):
+ """
+ Kill the process.
+
+ :param sig: Signal used to kill the process, defaults to SIGKILL
+ (has no effect on Windows).
+ :returns: the process return code if process was already stopped,
+ -<signal> if process was killed (Unix only)
+ :raises: RunnerNotStartedError
+ """
+ try:
+ if not self.is_running():
+ return self.returncode
+ except RunnerNotStartedError:
+ return
+
+ # The interactive mode directly uses a Popen process instance. Its
+ # kill() method doesn't accept any parameters, so handle it separately.
+ if isinstance(self.process_handler, subprocess.Popen):
+ self.process_handler.kill()
+ else:
+ self.process_handler.kill(sig=sig)
+
+ return self.returncode
+
+ def reset(self):
+ """
+ Reset the runner to its default state.
+ """
+ self.stop()
+ self.process_handler = None
+
+ def check_for_crashes(self, dump_directory=None, dump_save_path=None,
+ test_name=None, quiet=False):
+ """Check for possible crashes and output the stack traces.
+
+ :param dump_directory: Directory to search for minidump files
+ :param dump_save_path: Directory to save the minidump files to
+ :param test_name: Name to use in the crash output
+ :param quiet: If `True` don't print the PROCESS-CRASH message to stdout
+
+ :returns: Number of crashes which have been detected since the last invocation
+ """
+ crash_count = 0
+
+ if not dump_directory:
+ dump_directory = os.path.join(self.profile.profile, 'minidumps')
+
+ if not dump_save_path:
+ dump_save_path = self.dump_save_path
+
+ if not test_name:
+ test_name = "runner.py"
+
+ try:
+ if self.logger:
+ if mozcrash:
+ crash_count = mozcrash.log_crashes(
+ self.logger,
+ dump_directory,
+ self.symbols_path,
+ dump_save_path=dump_save_path,
+ test=test_name)
+ else:
+ self.logger.warning("Can not log crashes without mozcrash")
+ else:
+ if mozcrash:
+ crash_count = mozcrash.check_for_crashes(
+ dump_directory,
+ self.symbols_path,
+ dump_save_path=dump_save_path,
+ test_name=test_name,
+ quiet=quiet)
+
+ self.crashed += crash_count
+ except:
+ traceback.print_exc()
+
+ return crash_count
+
+ def cleanup(self):
+ """
+ Cleanup all runner state
+ """
+ self.stop()
diff --git a/testing/mozbase/mozrunner/mozrunner/cli.py b/testing/mozbase/mozrunner/mozrunner/cli.py
new file mode 100644
index 000000000..9b340edb6
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/cli.py
@@ -0,0 +1,152 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+from mozprofile import MozProfileCLI
+
+from .application import get_app_context
+from .runners import runners
+from .utils import findInPath
+
+# Map of debugging programs to information about them
+# from http://mxr.mozilla.org/mozilla-central/source/build/automationutils.py#59
+DEBUGGERS = {'gdb': {'interactive': True,
+ 'args': ['-q', '--args'], },
+ 'valgrind': {'interactive': False,
+ 'args': ['--leak-check=full']}
+ }
+
+
+def debugger_arguments(debugger, arguments=None, interactive=None):
+ """Finds debugger arguments from debugger given and defaults
+
+ :param debugger: name or path to debugger
+ :param arguments: arguments for the debugger, or None to use defaults
+ :param interactive: whether the debugger should run in interactive mode
+
+ """
+ # find debugger executable if not a file
+ executable = debugger
+ if not os.path.exists(executable):
+ executable = findInPath(debugger)
+ if executable is None:
+ raise Exception("Path to '%s' not found" % debugger)
+
+ # if the debugger is not in the dictionary of known debuggers, return defaults
+ dirname, debugger = os.path.split(debugger)
+ if debugger not in DEBUGGERS:
+ return ([executable] + (arguments or []), bool(interactive))
+
+ # otherwise use the dictionary values for arguments unless specified
+ if arguments is None:
+ arguments = DEBUGGERS[debugger].get('args', [])
+ if interactive is None:
+ interactive = DEBUGGERS[debugger].get('interactive', False)
+ return ([executable] + arguments, interactive)
+
+
+class CLI(MozProfileCLI):
+ """Command line interface"""
+
+ module = "mozrunner"
+
+ def __init__(self, args=sys.argv[1:]):
+ MozProfileCLI.__init__(self, args=args)
+
+ # choose appropriate runner and profile classes
+ app = self.options.app
+ try:
+ self.runner_class = runners[app]
+ self.profile_class = get_app_context(app).profile_class
+ except KeyError:
+ self.parser.error('Application "%s" unknown (should be one of "%s")' %
+ (app, ', '.join(runners.keys())))
+
+ def add_options(self, parser):
+ """add options to the parser"""
+ parser.description = ("Reliable start/stop/configuration of Mozilla"
+ " Applications (Firefox, Thunderbird, etc.)")
+
+ # add profile options
+ MozProfileCLI.add_options(self, parser)
+
+ # add runner options
+ parser.add_option('-b', "--binary",
+ dest="binary", help="Binary path.",
+ metavar=None, default=None)
+ parser.add_option('--app', dest='app', default='firefox',
+ help="Application to use [DEFAULT: %default]")
+ parser.add_option('--app-arg', dest='appArgs',
+ default=[], action='append',
+ help="provides an argument to the test application")
+ parser.add_option('--debugger', dest='debugger',
+ help="run under a debugger, e.g. gdb or valgrind")
+ parser.add_option('--debugger-args', dest='debugger_args',
+ action='store',
+ help="arguments to the debugger")
+ parser.add_option('--interactive', dest='interactive',
+ action='store_true',
+ help="run the program interactively")
+
+ # methods for running
+
+ def command_args(self):
+ """additional arguments for the mozilla application"""
+ return map(os.path.expanduser, self.options.appArgs)
+
+ def runner_args(self):
+ """arguments to instantiate the runner class"""
+ return dict(cmdargs=self.command_args(),
+ binary=self.options.binary)
+
+ def create_runner(self):
+ profile = self.profile_class(**self.profile_args())
+ return self.runner_class(profile=profile, **self.runner_args())
+
+ def run(self):
+ runner = self.create_runner()
+ self.start(runner)
+ runner.cleanup()
+
+ def debugger_arguments(self):
+ """Get the debugger arguments
+
+ returns a 2-tuple of debugger arguments:
+ (debugger_arguments, interactive)
+
+ """
+ debug_args = self.options.debugger_args
+ if debug_args is not None:
+ debug_args = debug_args.split()
+ interactive = self.options.interactive
+ if self.options.debugger:
+ debug_args, interactive = debugger_arguments(self.options.debugger, debug_args,
+ interactive)
+ return debug_args, interactive
+
+ def start(self, runner):
+ """Starts the runner and waits for the application to exit
+
+ It can also happen via a keyboard interrupt. It should be
+ overwritten to provide custom running of the runner instance.
+
+ """
+ # attach a debugger if specified
+ debug_args, interactive = self.debugger_arguments()
+ runner.start(debug_args=debug_args, interactive=interactive)
+ print 'Starting: ' + ' '.join(runner.command)
+ try:
+ runner.wait()
+ except KeyboardInterrupt:
+ runner.stop()
+
+
+def cli(args=sys.argv[1:]):
+ CLI(args).run()
+
+
+if __name__ == '__main__':
+ cli()
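+
+# Illustrative command lines (assuming the package exposes this entry point as
+# a 'mozrunner' console script; the binary paths are examples only):
+#
+#     mozrunner -b /path/to/firefox/firefox
+#     mozrunner -b /path/to/firefox/firefox --debugger=gdb --interactive
+#     mozrunner -b /path/to/thunderbird/thunderbird --app=thunderbird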
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/__init__.py b/testing/mozbase/mozrunner/mozrunner/devices/__init__.py
new file mode 100644
index 000000000..bdb7586c9
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/__init__.py
@@ -0,0 +1,13 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from emulator import BaseEmulator, Emulator, EmulatorAVD
+from base import Device
+
+import emulator_battery
+import emulator_geo
+import emulator_screen
+
+__all__ = ['BaseEmulator', 'Emulator', 'EmulatorAVD', 'Device',
+ 'emulator_battery', 'emulator_geo', 'emulator_screen']
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/android_device.py b/testing/mozbase/mozrunner/mozrunner/devices/android_device.py
new file mode 100644
index 000000000..0052f473c
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/android_device.py
@@ -0,0 +1,773 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import fileinput
+import glob
+import os
+import platform
+import psutil
+import shutil
+import signal
+import sys
+import telnetlib
+import time
+import urlparse
+import urllib2
+from distutils.spawn import find_executable
+
+from mozdevice import DeviceManagerADB, DMError
+from mozprocess import ProcessHandler
+
+EMULATOR_HOME_DIR = os.path.join(os.path.expanduser('~'), '.mozbuild', 'android-device')
+
+EMULATOR_AUTH_FILE = os.path.join(os.path.expanduser('~'), '.emulator_console_auth_token')
+
+TOOLTOOL_URL = 'https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py'
+
+TRY_URL = 'https://hg.mozilla.org/try/raw-file/default'
+
+MANIFEST_PATH = 'testing/config/tooltool-manifests'
+
+verbose_logging = False
+
+
+class AvdInfo(object):
+ """
+ Simple class to contain an AVD description.
+ """
+
+ def __init__(self, description, name, tooltool_manifest, extra_args,
+ port):
+ self.description = description
+ self.name = name
+ self.tooltool_manifest = tooltool_manifest
+ self.extra_args = extra_args
+ self.port = port
+
+
+"""
+ A dictionary to map an AVD type to a description of that type of AVD.
+
+ There is one entry for each type of AVD used in Mozilla automated tests
+ and the parameters for each reflect those used in mozharness.
+"""
+AVD_DICT = {
+ '4.3': AvdInfo('Android 4.3',
+ 'mozemulator-4.3',
+ 'testing/config/tooltool-manifests/androidarm_4_3/releng.manifest',
+ ['-show-kernel', '-debug',
+ 'init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket'],
+ 5554),
+ '6.0': AvdInfo('Android 6.0',
+ 'mozemulator-6.0',
+ 'testing/config/tooltool-manifests/androidarm_6_0/releng.manifest',
+ ['-show-kernel', '-debug',
+ 'init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket'],
+ 5554),
+ 'x86': AvdInfo('Android 4.2 x86',
+ 'mozemulator-x86',
+ 'testing/config/tooltool-manifests/androidx86/releng.manifest',
+ ['-debug',
+ 'init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket',
+ '-qemu', '-m', '1024', '-enable-kvm'],
+ 5554)
+}
+
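+# Illustrative lookup (not part of the original module): each entry maps an
+# AVD type key to an AvdInfo instance, e.g.
+#
+#     info = AVD_DICT['4.3']
+#     # info.name -> 'mozemulator-4.3'
+#     # info.port -> 5554
+#     # info.tooltool_manifest -> 'testing/config/tooltool-manifests/androidarm_4_3/releng.manifest'
+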
+
+def verify_android_device(build_obj, install=False, xre=False, debugger=False, verbose=False):
+ """
+ Determine if any Android device is connected via adb.
+ If no device is found, prompt to start an emulator.
+ If a device is found or an emulator started and 'install' is
+ specified, also check whether Firefox is installed on the
+ device; if not, prompt to install Firefox.
+ If 'xre' is specified, also check whether MOZ_HOST_BIN is set
+ to a valid xre/host-utils directory; if not, prompt to set
+ one up.
+ If 'debugger' is specified, also check that JimDB is installed;
+ if JimDB is not found, prompt to set up JimDB.
+ Returns True if the emulator was started or another device was
+ already connected.
+ """
+ device_verified = False
+ emulator = AndroidEmulator('*', substs=build_obj.substs, verbose=verbose)
+ devices = emulator.dm.devices()
+ if (len(devices) > 0) and ('device' in [d[1] for d in devices]):
+ device_verified = True
+ elif emulator.is_available():
+ response = raw_input(
+ "No Android devices connected. Start an emulator? (Y/n) ").strip()
+ if response.lower().startswith('y') or response == '':
+ if not emulator.check_avd():
+ _log_info("Fetching AVD. This may take a while...")
+ emulator.update_avd()
+ _log_info("Starting emulator running %s..." %
+ emulator.get_avd_description())
+ emulator.start()
+ emulator.wait_for_start()
+ device_verified = True
+
+ if device_verified and install:
+ # Determine if Firefox is installed on the device; if not,
+ # prompt to install. This feature allows a test command to
+ # launch an emulator, install Firefox, and proceed with testing
+ # in one operation. It is also a basic safeguard against other
+ # cases where testing is requested but Firefox installation has
+ # been forgotten.
+ # If Firefox is installed, there is no way to determine whether
+ # the current build is installed, and certainly no way to
+ # determine if the installed build is the desired build.
+ # Installing every time is problematic because:
+ # - it prevents testing against other builds (downloaded apk)
+ # - installation may take a couple of minutes.
+ installed = emulator.dm.shellCheckOutput(['pm', 'list',
+ 'packages', 'org.mozilla.'])
+ if 'fennec' not in installed and 'firefox' not in installed:
+ response = raw_input(
+ "It looks like Firefox is not installed on this device.\n"
+ "Install Firefox? (Y/n) ").strip()
+ if response.lower().startswith('y') or response == '':
+ _log_info("Installing Firefox. This may take a while...")
+ build_obj._run_make(directory=".", target='install',
+ ensure_exit_code=False)
+
+ if device_verified and xre:
+ # Check whether MOZ_HOST_BIN has been set to a valid xre; if not,
+ # prompt to install one.
+ xre_path = os.environ.get('MOZ_HOST_BIN')
+ err = None
+ if not xre_path:
+ err = "environment variable MOZ_HOST_BIN is not set to a directory" \
+ "containing host xpcshell"
+ elif not os.path.isdir(xre_path):
+ err = '$MOZ_HOST_BIN does not specify a directory'
+ elif not os.path.isfile(os.path.join(xre_path, 'xpcshell')):
+ err = '$MOZ_HOST_BIN/xpcshell does not exist'
+ if err:
+ xre_path = glob.glob(os.path.join(EMULATOR_HOME_DIR, 'host-utils*'))
+ for path in xre_path:
+ if os.path.isdir(path) and os.path.isfile(os.path.join(path, 'xpcshell')):
+ os.environ['MOZ_HOST_BIN'] = path
+ err = None
+ break
+ if err:
+ _log_info("Host utilities not found: %s" % err)
+ response = raw_input(
+ "Download and setup your host utilities? (Y/n) ").strip()
+ if response.lower().startswith('y') or response == '':
+ _log_info("Installing host utilities. This may take a while...")
+ host_platform = _get_host_platform()
+ if host_platform:
+ path = os.path.join(MANIFEST_PATH, host_platform, 'hostutils.manifest')
+ _get_tooltool_manifest(build_obj.substs, path, EMULATOR_HOME_DIR,
+ 'releng.manifest')
+ _tooltool_fetch()
+ xre_path = glob.glob(os.path.join(EMULATOR_HOME_DIR, 'host-utils*'))
+ for path in xre_path:
+ if os.path.isdir(path) and os.path.isfile(os.path.join(path, 'xpcshell')):
+ os.environ['MOZ_HOST_BIN'] = path
+ err = None
+ break
+ if err:
+ _log_warning("Unable to install host utilities.")
+ else:
+ _log_warning(
+ "Unable to install host utilities -- your platform is not supported!")
+
+ if debugger:
+ # Optionally set up JimDB. See https://wiki.mozilla.org/Mobile/Fennec/Android/GDB.
+ build_platform = _get_device_platform(build_obj.substs)
+ jimdb_path = os.path.join(EMULATOR_HOME_DIR, 'jimdb-%s' % build_platform)
+ jimdb_utils_path = os.path.join(jimdb_path, 'utils')
+ gdb_path = os.path.join(jimdb_path, 'bin', 'gdb')
+ err = None
+ if not os.path.isdir(jimdb_path):
+ err = '%s does not exist' % jimdb_path
+ elif not os.path.isfile(gdb_path):
+ err = '%s not found' % gdb_path
+ if err:
+ _log_info("JimDB (%s) not found: %s" % (build_platform, err))
+ response = raw_input(
+ "Download and setup JimDB (%s)? (Y/n) " % build_platform).strip()
+ if response.lower().startswith('y') or response == '':
+ host_platform = _get_host_platform()
+ if host_platform:
+ _log_info(
+ "Installing JimDB (%s/%s). This may take a while..." % (host_platform,
+ build_platform))
+ path = os.path.join(MANIFEST_PATH, host_platform,
+ 'jimdb-%s.manifest' % build_platform)
+ _get_tooltool_manifest(build_obj.substs, path,
+ EMULATOR_HOME_DIR, 'releng.manifest')
+ _tooltool_fetch()
+ if os.path.isfile(gdb_path):
+ # Get JimDB utilities from git repository
+ proc = ProcessHandler(['git', 'pull'], cwd=jimdb_utils_path)
+ proc.run()
+ git_pull_complete = False
+ try:
+ proc.wait()
+ if proc.proc.returncode == 0:
+ git_pull_complete = True
+ except:
+ if proc.poll() is None:
+ proc.kill(signal.SIGTERM)
+ if not git_pull_complete:
+ _log_warning("Unable to update JimDB utils from git -- "
+ "some JimDB features may be unavailable.")
+ else:
+ _log_warning("Unable to install JimDB -- unable to fetch from tooltool.")
+ else:
+ _log_warning("Unable to install JimDB -- your platform is not supported!")
+ if os.path.isfile(gdb_path):
+ # sync gdbinit.local with build settings
+ _update_gdbinit(build_obj.substs, os.path.join(jimdb_utils_path, "gdbinit.local"))
+ # ensure JimDB is in system path, so that mozdebug can find it
+ bin_path = os.path.join(jimdb_path, 'bin')
+ os.environ['PATH'] = "%s:%s" % (bin_path, os.environ['PATH'])
+
+ return device_verified
+
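+# Illustrative usage sketch: 'build_obj' is expected to be a mach/mozbuild
+# build object exposing 'substs' and '_run_make' (the calling mach command is
+# not shown in this file, so the exact call site is an assumption):
+#
+#     if verify_android_device(build_obj, install=True, xre=True):
+#         # a device or emulator is available for testing
+#         ...
+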
+
+def run_firefox_for_android(build_obj, params):
+ """
+ Launch Firefox for Android on the connected device.
+ Optional 'params' allow parameters to be passed to Firefox.
+ """
+ adb_path = _find_sdk_exe(build_obj.substs, 'adb', False)
+ if not adb_path:
+ adb_path = 'adb'
+ dm = DeviceManagerADB(autoconnect=False, adbPath=adb_path, retryLimit=1)
+ try:
+ #
+ # Construct an adb command similar to:
+ #
+ # $ adb shell am start -a android.activity.MAIN \
+ # -n org.mozilla.fennec_$USER \
+ # -d <url param> \
+ # --es args "<params>"
+ #
+ app = "%s/org.mozilla.gecko.BrowserApp" % build_obj.substs['ANDROID_PACKAGE_NAME']
+ cmd = ['am', 'start', '-a', 'android.activity.MAIN', '-n', app]
+ if params:
+ for p in params:
+ if urlparse.urlparse(p).scheme != "":
+ cmd.extend(['-d', p])
+ params.remove(p)
+ break
+ if params:
+ cmd.extend(['--es', 'args', '"%s"' % ' '.join(params)])
+ _log_debug(cmd)
+ output = dm.shellCheckOutput(cmd, timeout=10)
+ _log_info(output)
+ except DMError:
+ _log_warning("unable to launch Firefox for Android")
+ return 1
+ return 0
+
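+# Illustrative usage (parameter values are examples only): a parameter with a
+# URL scheme is passed via adb's '-d' flag, the rest via '--es args', as
+# constructed above.
+#
+#     run_firefox_for_android(build_obj, ['https://example.org', '-private'])
+#     # roughly: adb shell am start -a android.activity.MAIN \
+#     #              -n <ANDROID_PACKAGE_NAME>/org.mozilla.gecko.BrowserApp \
+#     #              -d https://example.org --es args "-private"
+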
+
+def grant_runtime_permissions(build_obj):
+ """
+ Grant required runtime permissions to the specified app
+ (typically org.mozilla.fennec_$USER).
+ """
+ app = build_obj.substs['ANDROID_PACKAGE_NAME']
+ adb_path = _find_sdk_exe(build_obj.substs, 'adb', False)
+ if not adb_path:
+ adb_path = 'adb'
+ dm = DeviceManagerADB(autoconnect=False, adbPath=adb_path, retryLimit=1)
+ dm.default_timeout = 10
+ try:
+ sdk_level = dm.shellCheckOutput(['getprop', 'ro.build.version.sdk'])
+ if sdk_level and int(sdk_level) >= 23:
+ _log_info("Granting important runtime permissions to %s" % app)
+ dm.shellCheckOutput(['pm', 'grant', app, 'android.permission.WRITE_EXTERNAL_STORAGE'])
+ dm.shellCheckOutput(['pm', 'grant', app, 'android.permission.READ_EXTERNAL_STORAGE'])
+ dm.shellCheckOutput(['pm', 'grant', app, 'android.permission.ACCESS_FINE_LOCATION'])
+ dm.shellCheckOutput(['pm', 'grant', app, 'android.permission.CAMERA'])
+ dm.shellCheckOutput(['pm', 'grant', app, 'android.permission.WRITE_CONTACTS'])
+ except DMError:
+ _log_warning("Unable to grant runtime permissions to %s" % app)
+
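+# For reference, on SDK level >= 23 each call above is equivalent to running
+# (package name depends on the build):
+#
+#     adb shell pm grant org.mozilla.fennec_<user> android.permission.CAMERA
+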
+
+class AndroidEmulator(object):
+
+ """
+ Support running the Android emulator with an AVD from Mozilla
+ test automation.
+
+ Example usage:
+ emulator = AndroidEmulator()
+ if not emulator.is_running() and emulator.is_available():
+ if not emulator.check_avd():
+ warn("this may take a while...")
+ emulator.update_avd()
+ emulator.start()
+ emulator.wait_for_start()
+ emulator.wait()
+ """
+
+ def __init__(self, avd_type='4.3', verbose=False, substs=None, device_serial=None):
+ global verbose_logging
+ self.emulator_log = None
+ self.emulator_path = 'emulator'
+ verbose_logging = verbose
+ self.substs = substs
+ self.avd_type = self._get_avd_type(avd_type)
+ self.avd_info = AVD_DICT[self.avd_type]
+ self.gpu = True
+ self.restarted = False
+ adb_path = _find_sdk_exe(substs, 'adb', False)
+ if not adb_path:
+ adb_path = 'adb'
+ self.dm = DeviceManagerADB(autoconnect=False, adbPath=adb_path, retryLimit=1,
+ deviceSerial=device_serial)
+ self.dm.default_timeout = 10
+ _log_debug("Emulator created with type %s" % self.avd_type)
+
+ def __del__(self):
+ if self.emulator_log:
+ self.emulator_log.close()
+
+ def is_running(self):
+ """
+ Returns True if the Android emulator is running.
+ """
+ for proc in psutil.process_iter():
+ name = proc.name()
+ # On some platforms, "emulator" may start an emulator with
+ # process name "emulator64-arm" or similar.
+ if name and name.startswith('emulator'):
+ return True
+ return False
+
+ def is_available(self):
+ """
+ Returns True if an emulator executable is found.
+ """
+ found = False
+ emulator_path = _find_sdk_exe(self.substs, 'emulator', True)
+ if emulator_path:
+ self.emulator_path = emulator_path
+ found = True
+ return found
+
+ def check_avd(self, force=False):
+ """
+ Determine if the AVD is already installed locally.
+ (This is usually used to determine if update_avd() is likely
+ to require a download; it is a convenient way of determining
+ whether a 'this may take a while' warning is warranted.)
+
+ Returns True if the AVD is installed.
+ """
+ avd = os.path.join(
+ EMULATOR_HOME_DIR, 'avd', self.avd_info.name + '.avd')
+ if force and os.path.exists(avd):
+ shutil.rmtree(avd)
+ if os.path.exists(avd):
+ _log_debug("AVD found at %s" % avd)
+ return True
+ return False
+
+ def update_avd(self, force=False):
+ """
+ If required, update the AVD via tooltool.
+
+ If the AVD directory is not found, or "force" is requested,
+ download the tooltool manifest associated with the AVD and then
+ invoke tooltool.py on the manifest. tooltool.py will download the
+ required archive (unless already present in the local tooltool
+ cache) and install the AVD.
+ """
+ avd = os.path.join(
+ EMULATOR_HOME_DIR, 'avd', self.avd_info.name + '.avd')
+ ini_file = os.path.join(
+ EMULATOR_HOME_DIR, 'avd', self.avd_info.name + '.ini')
+ if force and os.path.exists(avd):
+ shutil.rmtree(avd)
+ if not os.path.exists(avd):
+ if os.path.exists(ini_file):
+ os.remove(ini_file)
+ path = self.avd_info.tooltool_manifest
+ _get_tooltool_manifest(self.substs, path, EMULATOR_HOME_DIR, 'releng.manifest')
+ _tooltool_fetch()
+ self._update_avd_paths()
+
+ def start(self):
+ """
+ Launch the emulator.
+ """
+ if os.path.exists(EMULATOR_AUTH_FILE):
+ os.remove(EMULATOR_AUTH_FILE)
+ _log_debug("deleted %s" % EMULATOR_AUTH_FILE)
+ # create an empty auth file to disable emulator authentication
+ auth_file = open(EMULATOR_AUTH_FILE, 'w')
+ auth_file.close()
+
+ def outputHandler(line):
+ self.emulator_log.write("<%s>\n" % line)
+ if "Invalid value for -gpu" in line or "Invalid GPU mode" in line:
+ self.gpu = False
+ env = os.environ
+ env['ANDROID_AVD_HOME'] = os.path.join(EMULATOR_HOME_DIR, "avd")
+ command = [self.emulator_path, "-avd",
+ self.avd_info.name, "-port", "5554"]
+ if self.gpu:
+ command += ['-gpu', 'swiftshader']
+ if self.avd_info.extra_args:
+ # -enable-kvm option is not valid on OSX
+ if _get_host_platform() == 'macosx64' and '-enable-kvm' in self.avd_info.extra_args:
+ self.avd_info.extra_args.remove('-enable-kvm')
+ command += self.avd_info.extra_args
+ log_path = os.path.join(EMULATOR_HOME_DIR, 'emulator.log')
+ self.emulator_log = open(log_path, 'w')
+ _log_debug("Starting the emulator with this command: %s" %
+ ' '.join(command))
+ _log_debug("Emulator output will be written to '%s'" %
+ log_path)
+ self.proc = ProcessHandler(
+ command, storeOutput=False, processOutputLine=outputHandler,
+ env=env)
+ self.proc.run()
+ _log_debug("Emulator started with pid %d" %
+ int(self.proc.proc.pid))
+
+ def wait_for_start(self):
+ """
+ Verify that the emulator is running, the emulator device is visible
+ to adb, and Android has booted.
+ """
+ if not self.proc:
+ _log_warning("Emulator not started!")
+ return False
+ if self.check_completed():
+ return False
+ _log_debug("Waiting for device status...")
+ while(('emulator-5554', 'device') not in self.dm.devices()):
+ time.sleep(10)
+ if self.check_completed():
+ return False
+ _log_debug("Device status verified.")
+
+ _log_debug("Checking that Android has booted...")
+ complete = False
+ while(not complete):
+ output = ''
+ try:
+ output = self.dm.shellCheckOutput(
+ ['getprop', 'sys.boot_completed'], timeout=5)
+ except DMError:
+ # adb not yet responding...keep trying
+ pass
+ if output.strip() == '1':
+ complete = True
+ else:
+ time.sleep(10)
+ if self.check_completed():
+ return False
+ _log_debug("Android boot status verified.")
+
+ if not self._verify_emulator():
+ return False
+ return True
+
+ def check_completed(self):
+ if self.proc.proc.poll() is not None:
+ if not self.gpu and not self.restarted:
+ _log_warning("Emulator failed to start. Your emulator may be out of date.")
+ _log_warning("Trying to restart the emulator without -gpu argument.")
+ self.restarted = True
+ self.start()
+ return False
+ _log_warning("Emulator has already completed!")
+ log_path = os.path.join(EMULATOR_HOME_DIR, 'emulator.log')
+ _log_warning("See log at %s for more information." % log_path)
+ return True
+ return False
+
+ def wait(self):
+ """
+ Wait for the emulator to close. If interrupted, close the emulator.
+ """
+ try:
+ self.proc.wait()
+ except:
+ if self.proc.poll() is None:
+ self.cleanup()
+ return self.proc.poll()
+
+ def cleanup(self):
+ """
+ Close the emulator.
+ """
+ self.proc.kill(signal.SIGTERM)
+
+ def get_avd_description(self):
+ """
+ Return the human-friendly description of this AVD.
+ """
+ return self.avd_info.description
+
+ def _update_avd_paths(self):
+ avd_path = os.path.join(EMULATOR_HOME_DIR, "avd")
+ ini_file = os.path.join(avd_path, "test-1.ini")
+ ini_file_new = os.path.join(avd_path, self.avd_info.name + ".ini")
+ os.rename(ini_file, ini_file_new)
+ avd_dir = os.path.join(avd_path, "test-1.avd")
+ avd_dir_new = os.path.join(avd_path, self.avd_info.name + ".avd")
+ os.rename(avd_dir, avd_dir_new)
+ self._replace_ini_contents(ini_file_new)
+
+ def _replace_ini_contents(self, path):
+ with open(path, "r") as f:
+ lines = f.readlines()
+ with open(path, "w") as f:
+ for line in lines:
+ if line.startswith('path='):
+ avd_path = os.path.join(EMULATOR_HOME_DIR, "avd")
+ f.write('path=%s/%s.avd\n' %
+ (avd_path, self.avd_info.name))
+ elif line.startswith('path.rel='):
+ f.write('path.rel=avd/%s.avd\n' % self.avd_info.name)
+ else:
+ f.write(line)
+
+ def _telnet_cmd(self, telnet, command):
+ _log_debug(">>> " + command)
+ telnet.write('%s\n' % command)
+ result = telnet.read_until('OK', 10)
+ _log_debug("<<< " + result)
+ return result
+
+ def _verify_emulator(self):
+ telnet_ok = False
+ tn = None
+ while(not telnet_ok):
+ try:
+ tn = telnetlib.Telnet('localhost', self.avd_info.port, 10)
+ if tn is not None:
+ tn.read_until('OK', 10)
+ self._telnet_cmd(tn, 'avd status')
+ self._telnet_cmd(tn, 'redir list')
+ self._telnet_cmd(tn, 'network status')
+ tn.write('quit\n')
+ tn.read_all()
+ telnet_ok = True
+ else:
+ _log_warning("Unable to connect to port %d" % self.avd_info.port)
+ except:
+ _log_warning("Trying again after unexpected exception")
+ finally:
+ if tn is not None:
+ tn.close()
+ if not telnet_ok:
+ time.sleep(10)
+ if self.proc.proc.poll() is not None:
+ _log_warning("Emulator has already completed!")
+ return False
+ return telnet_ok
+
+ def _get_avd_type(self, requested):
+ if requested in AVD_DICT.keys():
+ return requested
+ if self.substs:
+ if not self.substs['TARGET_CPU'].startswith('arm'):
+ return 'x86'
+ return '4.3'
+
+
+def _find_sdk_exe(substs, exe, tools):
+ if tools:
+ subdir = 'tools'
+ else:
+ subdir = 'platform-tools'
+
+ found = False
+ if not found and substs:
+ # It's best to use the tool specified by the build, rather
+ # than something we find on the PATH or crawl for.
+ try:
+ exe_path = substs[exe.upper()]
+ if os.path.exists(exe_path):
+ found = True
+ else:
+ _log_debug(
+ "Unable to find executable at %s" % exe_path)
+ except KeyError:
+ _log_debug("%s not set" % exe.upper())
+
+ # Append '.exe' to the name on Windows if it's not present,
+ # so that the executable can be found.
+ if (os.name == 'nt' and not exe.lower().endswith('.exe')):
+ exe += '.exe'
+
+ if not found:
+ # Can exe be found in the Android SDK?
+ try:
+ android_sdk_root = os.environ['ANDROID_SDK_ROOT']
+ exe_path = os.path.join(
+ android_sdk_root, subdir, exe)
+ if os.path.exists(exe_path):
+ found = True
+ else:
+ _log_debug(
+ "Unable to find executable at %s" % exe_path)
+ except KeyError:
+ _log_debug("ANDROID_SDK_ROOT not set")
+
+ if not found:
+ # Can exe be found in the default bootstrap location?
+ mozbuild_path = os.environ.get('MOZBUILD_STATE_PATH',
+ os.path.expanduser(os.path.join('~', '.mozbuild')))
+ exe_path = os.path.join(
+ mozbuild_path, 'android-sdk-linux', subdir, exe)
+ if os.path.exists(exe_path):
+ found = True
+ else:
+ _log_debug(
+ "Unable to find executable at %s" % exe_path)
+
+ if not found:
+ # Is exe on PATH?
+ exe_path = find_executable(exe)
+ if exe_path:
+ found = True
+ else:
+ _log_debug("Unable to find executable on PATH")
+
+ if found:
+ _log_debug("%s found at %s" % (exe, exe_path))
+ else:
+ exe_path = None
+ return exe_path
+
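+# Search order used above, for reference:
+#   1. the path recorded in the build's substs (e.g. substs['ADB'])
+#   2. $ANDROID_SDK_ROOT/<tools|platform-tools>/<exe>
+#   3. $MOZBUILD_STATE_PATH (default ~/.mozbuild)/android-sdk-linux/<tools|platform-tools>/<exe>
+#   4. the first match for <exe> on PATH
+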
+
+def _log_debug(text):
+ if verbose_logging:
+ print "DEBUG: %s" % text
+
+
+def _log_warning(text):
+ print "WARNING: %s" % text
+
+
+def _log_info(text):
+ print "%s" % text
+
+
+def _download_file(url, filename, path):
+ f = urllib2.urlopen(url)
+ if not os.path.isdir(path):
+ try:
+ os.makedirs(path)
+ except Exception, e:
+ _log_warning(str(e))
+ return False
+ local_file = open(os.path.join(path, filename), 'wb')
+ local_file.write(f.read())
+ local_file.close()
+ _log_debug("Downloaded %s to %s/%s" % (url, path, filename))
+ return True
+
+
+def _get_tooltool_manifest(substs, src_path, dst_path, filename):
+ if not os.path.isdir(dst_path):
+ try:
+ os.makedirs(dst_path)
+ except Exception, e:
+ _log_warning(str(e))
+ copied = False
+ if substs and 'top_srcdir' in substs:
+ src = os.path.join(substs['top_srcdir'], src_path)
+ if os.path.exists(src):
+ dst = os.path.join(dst_path, filename)
+ shutil.copy(src, dst)
+ copied = True
+ _log_debug("Copied tooltool manifest %s to %s" % (src, dst))
+ if not copied:
+ url = os.path.join(TRY_URL, src_path)
+ _download_file(url, filename, dst_path)
+
+
+def _tooltool_fetch():
+ def outputHandler(line):
+ _log_debug(line)
+ _download_file(TOOLTOOL_URL, 'tooltool.py', EMULATOR_HOME_DIR)
+ command = [sys.executable, 'tooltool.py',
+ 'fetch', '-o', '-m', 'releng.manifest']
+ proc = ProcessHandler(
+ command, processOutputLine=outputHandler, storeOutput=False,
+ cwd=EMULATOR_HOME_DIR)
+ proc.run()
+ try:
+ proc.wait()
+ except:
+ if proc.poll() is None:
+ proc.kill(signal.SIGTERM)
+
+
+def _get_host_platform():
+ plat = None
+ if 'darwin' in str(sys.platform).lower():
+ plat = 'macosx64'
+ elif 'linux' in str(sys.platform).lower():
+ if '64' in platform.architecture()[0]:
+ plat = 'linux64'
+ else:
+ plat = 'linux32'
+ return plat
+
+
+def _get_device_platform(substs):
+ # PIE executables are required when SDK level >= 21 - important for gdbserver
+ adb_path = _find_sdk_exe(substs, 'adb', False)
+ if not adb_path:
+ adb_path = 'adb'
+ dm = DeviceManagerADB(autoconnect=False, adbPath=adb_path, retryLimit=1)
+ sdk_level = None
+ try:
+ cmd = ['getprop', 'ro.build.version.sdk']
+ _log_debug(cmd)
+ output = dm.shellCheckOutput(cmd, timeout=10)
+ if output:
+ sdk_level = int(output)
+ except:
+ _log_warning("unable to determine Android sdk level")
+ pie = ''
+ if sdk_level and sdk_level >= 21:
+ pie = '-pie'
+ if substs['TARGET_CPU'].startswith('arm'):
+ return 'arm%s' % pie
+ return 'x86%s' % pie
+
+
+def _update_gdbinit(substs, path):
+ if os.path.exists(path):
+ obj_replaced = False
+ src_replaced = False
+ # update existing objdir/srcroot in place
+ for line in fileinput.input(path, inplace=True):
+ if "feninit.default.objdir" in line and substs and 'MOZ_BUILD_ROOT' in substs:
+ print("python feninit.default.objdir = '%s'" % substs['MOZ_BUILD_ROOT'])
+ obj_replaced = True
+ elif "feninit.default.srcroot" in line and substs and 'top_srcdir' in substs:
+ print("python feninit.default.srcroot = '%s'" % substs['top_srcdir'])
+ src_replaced = True
+ else:
+ print(line.strip())
+ # append objdir/srcroot if not updated
+ if (not obj_replaced) and substs and 'MOZ_BUILD_ROOT' in substs:
+ with open(path, "a") as f:
+ f.write("\npython feninit.default.objdir = '%s'\n" % substs['MOZ_BUILD_ROOT'])
+ if (not src_replaced) and substs and 'top_srcdir' in substs:
+ with open(path, "a") as f:
+ f.write("python feninit.default.srcroot = '%s'\n" % substs['top_srcdir'])
+ else:
+ # write objdir/srcroot to new gdbinit file
+ with open(path, "w") as f:
+ if substs and 'MOZ_BUILD_ROOT' in substs:
+ f.write("python feninit.default.objdir = '%s'\n" % substs['MOZ_BUILD_ROOT'])
+ if substs and 'top_srcdir' in substs:
+ f.write("python feninit.default.srcroot = '%s'\n" % substs['top_srcdir'])
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/autophone.py b/testing/mozbase/mozrunner/mozrunner/devices/autophone.py
new file mode 100644
index 000000000..3b4913028
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/autophone.py
@@ -0,0 +1,651 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import glob
+import json
+import logging
+import os
+import shutil
+import signal
+import socket
+import sys
+import threading
+import time
+import which
+import BaseHTTPServer
+import SimpleHTTPServer
+
+from mozbuild.virtualenv import VirtualenvManager
+from mozdevice import DeviceManagerADB
+from mozprocess import ProcessHandler
+
+
+class AutophoneRunner(object):
+ """
+ Support the mach 'autophone' command: configure and run autophone.
+ """
+ config = {'base-dir': None,
+ 'requirements-installed': False,
+ 'devices-configured': False,
+ 'test-manifest': None}
+ CONFIG_FILE = os.path.join(os.path.expanduser('~'), '.mozbuild', 'autophone.json')
+
+ def __init__(self, build_obj, verbose):
+ self.build_obj = build_obj
+ self.verbose = verbose
+ self.autophone_options = []
+ self.httpd = None
+ self.webserver_required = False
+
+ def reset_to_clean(self):
+ """
+ If confirmed, remove the autophone directory and configuration.
+ """
+ dir = self.config['base-dir']
+ if dir and os.path.exists(dir) and os.path.exists(self.CONFIG_FILE):
+ self.build_obj.log(logging.WARN, "autophone", {},
+ "*** This will delete %s and reset your "
+ "'mach autophone' configuration! ***" % dir)
+ response = raw_input(
+ "Proceed with deletion? (y/N) ").strip()
+ if response.lower().startswith('y'):
+ os.remove(self.CONFIG_FILE)
+ shutil.rmtree(dir)
+ else:
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Already clean -- nothing to do!")
+
+ def save_config(self):
+ """
+ Persist self.config to a file.
+ """
+ try:
+ with open(self.CONFIG_FILE, 'w') as f:
+ json.dump(self.config, f)
+ if self.verbose:
+ print("saved configuration: %s" % self.config)
+ except:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "unable to save 'mach autophone' "
+ "configuration to %s" % self.CONFIG_FILE)
+ if self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ str(sys.exc_info()[0]))
+
+ def load_config(self):
+ """
+ Import the configuration info saved by save_config().
+ """
+ if os.path.exists(self.CONFIG_FILE):
+ try:
+ with open(self.CONFIG_FILE, 'r') as f:
+ self.config = json.load(f)
+ if self.verbose:
+ print("loaded configuration: %s" % self.config)
+ except:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "unable to load 'mach autophone' "
+ "configuration from %s" % self.CONFIG_FILE)
+ if self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ str(sys.exc_info()[0]))
+
+ def setup_directory(self):
+ """
+ Find the autophone source code location, or download if necessary.
+ """
+ keep_going = True
+ dir = self.config['base-dir']
+ if not dir:
+ dir = os.path.join(os.path.expanduser('~'), 'mach-autophone')
+ if os.path.exists(os.path.join(dir, '.git')):
+ response = raw_input(
+ "Run autophone from existing directory, %s (Y/n) " % dir).strip()
+ if 'n' not in response.lower():
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Configuring and running autophone at %s" % dir)
+ return keep_going
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Unable to find an existing autophone directory. "
+ "Let's setup a new one...")
+ response = raw_input(
+ "Enter location of new autophone directory: [%s] " % dir).strip()
+ if response != '':
+ dir = response
+ self.config['base-dir'] = dir
+ if not os.path.exists(os.path.join(dir, '.git')):
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Cloning autophone repository to '%s'..." % dir)
+ self.config['requirements-installed'] = False
+ self.config['devices-configured'] = False
+ self.run_process(['git', 'clone', 'https://github.com/mozilla/autophone', dir])
+ self.run_process(['git', 'submodule', 'update', '--init', '--remote'], cwd=dir)
+ if not os.path.exists(os.path.join(dir, '.git')):
+ # git not installed? File permission problem? github not available?
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "Unable to clone autophone directory.")
+ if not self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "Try re-running this command with --verbose to get more info.")
+ keep_going = False
+ return keep_going
+
+ def install_requirements(self):
+ """
+ Install required python modules in a virtualenv rooted at <autophone>/_virtualenv.
+ """
+ keep_going = True
+ dir = self.config['base-dir']
+ vdir = os.path.join(dir, '_virtualenv')
+ self.auto_virtualenv_manager = VirtualenvManager(self.build_obj.topsrcdir,
+ self.build_obj.topobjdir,
+ vdir, sys.stdout,
+ os.path.join(self.build_obj.topsrcdir,
+ 'build',
+ 'virtualenv_packages.txt'))
+ if not self.config['requirements-installed'] or not os.path.exists(vdir):
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Installing required modules in a virtualenv...")
+ self.auto_virtualenv_manager.build()
+ self.auto_virtualenv_manager._run_pip(['install', '-r',
+ os.path.join(dir, 'requirements.txt')])
+ self.config['requirements-installed'] = True
+ return keep_going
+
+ def configure_devices(self):
+ """
+ Ensure devices.ini is set up.
+ """
+ keep_going = True
+ device_ini = os.path.join(self.config['base-dir'], 'devices.ini')
+ if os.path.exists(device_ini):
+ response = raw_input(
+ "Use existing device configuration at %s? (Y/n) " % device_ini).strip()
+ if 'n' not in response.lower():
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Using device configuration at %s" % device_ini)
+ return keep_going
+ keep_going = False
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "You must configure at least one Android device "
+ "before running autophone.")
+ response = raw_input(
+ "Configure devices now? (Y/n) ").strip()
+ if response.lower().startswith('y') or response == '':
+ response = raw_input(
+ "Connect your rooted Android test device(s) with usb and press Enter ")
+ adb_path = 'adb'
+ try:
+ if os.path.exists(self.build_obj.substs["ADB"]):
+ adb_path = self.build_obj.substs["ADB"]
+ except:
+ if self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ str(sys.exc_info()[0]))
+ # No build environment?
+ try:
+ adb_path = which.which('adb')
+ except which.WhichError:
+ adb_path = raw_input(
+ "adb not found. Enter path to adb: ").strip()
+ if self.verbose:
+ print("Using adb at %s" % adb_path)
+ dm = DeviceManagerADB(autoconnect=False, adbPath=adb_path, retryLimit=1)
+ device_index = 1
+ try:
+ with open(os.path.join(self.config['base-dir'], 'devices.ini'), 'w') as f:
+ for device in dm.devices():
+ serial = device[0]
+ if self.verify_device(adb_path, serial):
+ f.write("[device-%d]\nserialno=%s\n" % (device_index, serial))
+ device_index += 1
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Added '%s' to device configuration." % serial)
+ keep_going = True
+ else:
+ self.build_obj.log(logging.WARNING, "autophone", {},
+ "Device '%s' is not rooted - skipping" % serial)
+ except:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "Failed to get list of connected Android devices.")
+ if self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ str(sys.exc_info()[0]))
+ keep_going = False
+ if device_index <= 1:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "No devices configured! (Can you see your rooted test device(s)"
+ " in 'adb devices'?")
+ keep_going = False
+ if keep_going:
+ self.config['devices-configured'] = True
+ return keep_going
+
+ def configure_tests(self):
+ """
+ Determine the required autophone --test-path option.
+ """
+ dir = self.config['base-dir']
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Autophone must be started with a 'test manifest' "
+ "describing the type(s) of test(s) to run.")
+ test_options = []
+ for ini in glob.glob(os.path.join(dir, 'tests', '*.ini')):
+ with open(ini, 'r') as f:
+ content = f.readlines()
+ for line in content:
+ if line.startswith('# @mach@ '):
+ webserver = False
+ if '@webserver@' in line:
+ webserver = True
+ line = line.replace('@webserver@', '')
+ test_options.append((line[9:].strip(), ini, webserver))
+ break
+ if len(test_options) >= 1:
+ test_options.sort()
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "These test manifests are available:")
+ index = 1
+ for option in test_options:
+ print("%d. %s" % (index, option[0]))
+ index += 1
+ highest = index - 1
+ path = None
+ while not path:
+ path = None
+ self.webserver_required = False
+ response = raw_input(
+ "Select test manifest (1-%d, or path to test manifest) " % highest).strip()
+ if os.path.isfile(response):
+ path = response
+ self.config['test-manifest'] = path
+ # Assume a webserver is required; if it isn't, user can provide a dummy url.
+ self.webserver_required = True
+ else:
+ try:
+ choice = int(response)
+ if choice >= 1 and choice <= highest:
+ path = test_options[choice - 1][1]
+ if test_options[choice - 1][2]:
+ self.webserver_required = True
+ else:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "'%s' invalid: Enter a number between "
+ "1 and %d!" % (response, highest))
+ except ValueError:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "'%s' unrecognized: Enter a number between "
+ "1 and %d!" % (response, highest))
+ self.autophone_options.extend(['--test-path', path])
+ else:
+ # Provide a simple backup for the unusual case where test manifests
+ # cannot be found.
+ response = ""
+ default = self.config['test-manifest'] or ""
+ while not os.path.isfile(response):
+ response = raw_input(
+ "Enter path to a test manifest: [%s] " % default).strip()
+ if response == "":
+ response = default
+ self.autophone_options.extend(['--test-path', response])
+ self.config['test-manifest'] = response
+ # Assume a webserver is required; if it isn't, user can provide a dummy url.
+ self.webserver_required = True
+
+ return True
+
+ def write_unittest_defaults(self, defaults_path, xre_path):
+ """
+ Write unittest-defaults.ini.
+ """
+ try:
+ # This should be similar to unittest-defaults.ini.example
+ with open(defaults_path, 'w') as f:
+ f.write("""\
+# Created by 'mach autophone'
+[runtests]
+xre_path = %s
+utility_path = %s
+console_level = DEBUG
+log_level = DEBUG
+time_out = 300""" % (xre_path, xre_path))
+ if self.verbose:
+ print("Created %s with host utilities path %s" % (defaults_path, xre_path))
+ except:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "Unable to create %s" % defaults_path)
+ if self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ str(sys.exc_info()[0]))
+
+ def configure_unittests(self):
+ """
+ Ensure unittest-defaults.ini is set up.
+ """
+ defaults_path = os.path.join(self.config['base-dir'], 'configs', 'unittest-defaults.ini')
+ if os.path.isfile(defaults_path):
+ response = raw_input(
+ "Use existing unit test configuration at %s? (Y/n) " % defaults_path).strip()
+ if 'n' in response.lower():
+ os.remove(defaults_path)
+ if not os.path.isfile(defaults_path):
+ xre_path = os.environ.get('MOZ_HOST_BIN')
+ if not xre_path or not os.path.isdir(xre_path):
+ emulator_path = os.path.join(os.path.expanduser('~'), '.mozbuild',
+ 'android-device')
+ xre_paths = glob.glob(os.path.join(emulator_path, 'host-utils*'))
+ for xre_path in xre_paths:
+ if os.path.isdir(xre_path):
+ break
+ if not xre_path or not os.path.isdir(xre_path) or \
+ not os.path.isfile(os.path.join(xre_path, 'xpcshell')):
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Some tests require access to 'host utilities' "
+ "such as xpcshell.")
+ xre_path = raw_input(
+ "Enter path to host utilities directory: ").strip()
+ if not xre_path or not os.path.isdir(xre_path) or \
+ not os.path.isfile(os.path.join(xre_path, 'xpcshell')):
+ self.build_obj.log(
+ logging.ERROR, "autophone", {},
+ "Unable to configure unit tests - no path to host utilities.")
+ return False
+ self.write_unittest_defaults(defaults_path, xre_path)
+ if os.path.isfile(defaults_path):
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Using unit test configuration at %s" % defaults_path)
+ return True
+
+ def configure_ip(self):
+ """
+ Determine what IP should be used for the autophone --ipaddr option.
+ """
+ # Take a guess at the IP to suggest. This won't always get the "right" IP,
+ # but will save some typing, sometimes.
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(('8.8.8.8', 0))
+ ip = s.getsockname()[0]
+ response = raw_input(
+ "IP address of interface to use for phone callbacks [%s] " % ip).strip()
+ if response == "":
+ response = ip
+ self.autophone_options.extend(['--ipaddr', response])
+ self.ipaddr = response
+ return True
+
+ def configure_webserver(self):
+ """
+ Determine the autophone --webserver-url option.
+ """
+ if self.webserver_required:
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Some of your selected tests require a webserver.")
+ response = raw_input("Start a webserver now? [Y/n] ").strip()
+ parts = []
+ while len(parts) != 2:
+ response2 = raw_input(
+ "Webserver address? [%s:8100] " % self.ipaddr).strip()
+ if response2 == "":
+ parts = [self.ipaddr, "8100"]
+ else:
+ parts = response2.split(":")
+ if len(parts) == 2:
+ addr = parts[0]
+ try:
+ port = int(parts[1])
+ if port <= 0:
+ self.build_obj.log(
+ logging.ERROR, "autophone", {},
+ "Port must be > 0. "
+ "Enter webserver address in the format <ip>:<port>")
+ parts = []
+ except ValueError:
+ self.build_obj.log(
+ logging.ERROR, "autophone", {},
+ "Port must be a number. "
+ "Enter webserver address in the format <ip>:<port>")
+ parts = []
+ else:
+ self.build_obj.log(
+ logging.ERROR, "autophone", {},
+ "Enter webserver address in the format <ip>:<port>")
+ if not ('n' in response.lower()):
+ self.launch_webserver(addr, port)
+ self.autophone_options.extend(['--webserver-url',
+ 'http://%s:%d' % (addr, port)])
+ return True
+
+ def configure_other(self):
+ """
+ Advanced users may set up additional options in autophone.ini.
+ Find and handle that case silently.
+ """
+ path = os.path.join(self.config['base-dir'], 'autophone.ini')
+ if os.path.isfile(path):
+ self.autophone_options.extend(['--config', path])
+ return True
+
+ def configure(self):
+ """
+ Ensure all configuration files are set up and determine autophone options.
+ """
+ return self.configure_devices() and \
+ self.configure_unittests() and \
+ self.configure_tests() and \
+ self.configure_ip() and \
+ self.configure_webserver() and \
+ self.configure_other()
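+
+ # Illustrative result: after a successful configure(), autophone_options
+ # holds values assembled from the interactive answers, roughly:
+ #
+ # ['--test-path', '<path/to/manifest.ini>',
+ # '--ipaddr', '<ip>',
+ # '--webserver-url', 'http://<ip>:8100',   # only if a webserver is required
+ # '--config', '<base-dir>/autophone.ini']  # only if autophone.ini exists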
+
+ def verify_device(self, adb_path, device):
+ """
+ Check that the specified device is available and rooted.
+ """
+ try:
+ dm = DeviceManagerADB(adbPath=adb_path, retryLimit=1, deviceSerial=device)
+ if dm._haveSu or dm._haveRootShell:
+ return True
+ except:
+ self.build_obj.log(
+ logging.WARN, "autophone", {},
+ "Unable to verify root on device.")
+ if self.verbose:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ str(sys.exc_info()[0]))
+ return False
+
+ def launch_autophone(self):
+ """
+ Launch autophone in its own thread and wait for autophone startup.
+ """
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Launching autophone...")
+ self.thread = threading.Thread(target=self.run_autophone)
+ self.thread.start()
+ # Wait for startup, so that autophone startup messages do not get mixed
+ # in with our interactive command prompts.
+ dir = self.config['base-dir']
+ started = False
+ for seconds in [5, 5, 3, 3, 1, 1, 1, 1]:
+ time.sleep(seconds)
+ if self.run_process(['./ap.sh', 'autophone-status'], cwd=dir, dump=False):
+ started = True
+ break
+ time.sleep(1)
+ if not started:
+ self.build_obj.log(logging.WARN, "autophone", {},
+ "Autophone is taking longer than expected to start.")
+
+ def run_autophone(self):
+ dir = self.config['base-dir']
+ cmd = [self.auto_virtualenv_manager.python_path, 'autophone.py']
+ cmd.extend(self.autophone_options)
+ self.run_process(cmd, cwd=dir, dump=True)
+
+ def command_prompts(self):
+ """
+ Interactive command prompts: Provide access to ap.sh and trigger_runs.py.
+ """
+ dir = self.config['base-dir']
+ if self.thread.isAlive():
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "Use 'trigger' to select builds to test using the current test manifest.")
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "Type 'trigger', 'help', 'quit', or an autophone command.")
+ quitting = False
+ while self.thread.isAlive() and not quitting:
+ response = raw_input(
+ "autophone command? ").strip().lower()
+ if response == "help":
+ self.run_process(['./ap.sh', 'autophone-help'], cwd=dir, dump=True)
+ print("""\
+
+Additional commands available in this interactive shell:
+
+trigger
+ Initiate autophone test runs. You will be prompted for a set of builds
+ to run tests against. (To run a different type of test, quit, run this
+ mach command again, and select a new test manifest.)
+
+quit
+ Shut down autophone and exit this shell (shortcut to 'autophone-shutdown')
+
+ """)
+ continue
+ if response == "trigger":
+ self.trigger_prompts()
+ continue
+ if response == "quit":
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Quitting...")
+ response = "autophone-shutdown"
+ if response == "autophone-shutdown":
+ quitting = True
+ self.run_process(['./ap.sh', response], cwd=dir, dump=True)
+ if self.httpd:
+ self.httpd.shutdown()
+ self.thread.join()
+
+ def trigger_prompts(self):
+ """
+ Sub-prompts for the "trigger" command.
+ """
+ dir = self.config['base-dir']
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "Tests will be run against a build or collection of builds, selected by:")
+ print("""\
+1. The latest build
+2. Build URL
+3. Build ID
+4. Date/date-time range\
+ """)
+ highest = 4
+ choice = 0
+ while (choice < 1 or choice > highest):
+ response = raw_input(
+ "Build selection type? (1-%d) " % highest).strip()
+ try:
+ choice = int(response)
+ except ValueError:
+ self.build_obj.log(logging.ERROR, "autophone", {},
+ "Enter a number between 1 and %d" % highest)
+ choice = 0
+ if choice == 1:
+ options = ["latest"]
+ elif choice == 2:
+ url = raw_input(
+ "Enter url of build to test; may be an http or file schema ").strip()
+ options = ["--build-url=%s" % url]
+ elif choice == 3:
+ response = raw_input(
+ "Enter Build ID, eg 20120403063158 ").strip()
+ options = [response]
+ elif choice == 4:
+ start = raw_input(
+ "Enter start build date or date-time, "
+ "e.g. 2012-04-03 or 2012-04-03T06:31:58 ").strip()
+ end = raw_input(
+ "Enter end build date or date-time, "
+ "e.g. 2012-04-03 or 2012-04-03T06:31:58 ").strip()
+ options = [start, end]
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "You may optionally specify a repository name like 'mozilla-inbound' or 'try'.")
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "If not specified, 'mozilla-central' is assumed.")
+ repo = raw_input(
+ "Enter repository name: ").strip()
+ if len(repo) > 0:
+ options.extend(["--repo=%s" % repo])
+ if repo == "mozilla-central" or repo == "mozilla-aurora" or len(repo) < 1:
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "You may optionally specify the build location, like 'nightly' or 'tinderbox'.")
+ location = raw_input(
+ "Enter build location: ").strip()
+ if len(location) > 0:
+ options.extend(["--build-location=%s" % location])
+ else:
+ options.extend(["--build-location=tinderbox"])
+ cmd = [self.auto_virtualenv_manager.python_path, "trigger_runs.py"]
+ cmd.extend(options)
+ self.build_obj.log(
+ logging.INFO, "autophone", {},
+ "Triggering...Tests will run once builds have been downloaded.")
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Use 'autophone-status' to check progress.")
+ self.run_process(cmd, cwd=dir, dump=True)
+
+ def launch_webserver(self, addr, port):
+ """
+ Launch the webserver (in a separate thread).
+ """
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Launching webserver...")
+ self.webserver_addr = addr
+ self.webserver_port = port
+ self.threadweb = threading.Thread(target=self.run_webserver)
+ self.threadweb.start()
+
+ def run_webserver(self):
+ class AutoHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+ # A simple request handler with logging suppressed.
+
+ def log_message(self, format, *args):
+ pass
+
+ os.chdir(self.config['base-dir'])
+ address = (self.webserver_addr, self.webserver_port)
+ self.httpd = BaseHTTPServer.HTTPServer(address, AutoHTTPRequestHandler)
+ try:
+ self.httpd.serve_forever()
+ except KeyboardInterrupt:
+ print("Web server interrupted.")
+
+ def run_process(self, cmd, cwd=None, dump=False):
+ def _processOutput(line):
+ if self.verbose or dump:
+ print(line)
+
+ if self.verbose:
+ self.build_obj.log(logging.INFO, "autophone", {},
+ "Running '%s' in '%s'" % (cmd, cwd))
+ proc = ProcessHandler(cmd, cwd=cwd, processOutputLine=_processOutput,
+ processStderrLine=_processOutput)
+ proc.run()
+ proc_complete = False
+ try:
+ proc.wait()
+ if proc.proc.returncode == 0:
+ proc_complete = True
+ except:
+ if proc.poll() is None:
+ proc.kill(signal.SIGTERM)
+ if not proc_complete:
+ if not self.verbose:
+ print(proc.output)
+ return proc_complete
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/base.py b/testing/mozbase/mozrunner/mozrunner/devices/base.py
new file mode 100644
index 000000000..b5cf2b58f
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/base.py
@@ -0,0 +1,306 @@
+from ConfigParser import (
+ ConfigParser,
+ RawConfigParser
+)
+import datetime
+import os
+import posixpath
+import re
+import shutil
+import socket
+import subprocess
+import tempfile
+import time
+import traceback
+
+from mozdevice import DMError
+from mozprocess import ProcessHandler
+
+
+class Device(object):
+ connected = False
+ logcat_proc = None
+
+ def __init__(self, app_ctx, logdir=None, serial=None, restore=True):
+ self.app_ctx = app_ctx
+ self.dm = self.app_ctx.dm
+ self.restore = restore
+ self.serial = serial
+ self.logdir = os.path.abspath(os.path.expanduser(logdir))
+ self.added_files = set()
+ self.backup_files = set()
+
+ @property
+ def remote_profiles(self):
+ """
+ A list of remote profiles on the device.
+ """
+ remote_ini = self.app_ctx.remote_profiles_ini
+ if not self.dm.fileExists(remote_ini):
+ raise IOError("Remote file '%s' not found" % remote_ini)
+
+ local_ini = tempfile.NamedTemporaryFile()
+ self.dm.getFile(remote_ini, local_ini.name)
+ cfg = ConfigParser()
+ cfg.read(local_ini.name)
+
+ profiles = []
+ for section in cfg.sections():
+ if cfg.has_option(section, 'Path'):
+ if cfg.has_option(section, 'IsRelative') and cfg.getint(section, 'IsRelative'):
+ profiles.append(posixpath.join(posixpath.dirname(remote_ini),
+ cfg.get(section, 'Path')))
+ else:
+ profiles.append(cfg.get(section, 'Path'))
+ return profiles
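+
+ # Illustrative example: given a remote profiles.ini containing
+ #
+ # [Profile0]
+ # Name=default
+ # IsRelative=1
+ # Path=abc123.default
+ #
+ # remote_profiles returns ['<profiles.ini dir>/abc123.default']; entries
+ # with IsRelative=0 are returned as-is.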
+
+ def pull_minidumps(self):
+ """
+ Saves any minidumps found in the remote profile on the local filesystem.
+
+ :returns: Path to directory containing the dumps.
+ """
+ remote_dump_dir = posixpath.join(self.app_ctx.remote_profile, 'minidumps')
+ local_dump_dir = tempfile.mkdtemp()
+ self.dm.getDirectory(remote_dump_dir, local_dump_dir)
+ if os.listdir(local_dump_dir):
+ for f in self.dm.listFiles(remote_dump_dir):
+ self.dm.removeFile(posixpath.join(remote_dump_dir, f))
+ return local_dump_dir
+
+ def setup_profile(self, profile):
+ """
+ Copy profile to the device and update the remote profiles.ini
+ to point to the new profile.
+
+ :param profile: mozprofile object to copy over.
+ """
+ self.dm.remount()
+
+ if self.dm.dirExists(self.app_ctx.remote_profile):
+ self.dm.shellCheckOutput(['rm', '-r', self.app_ctx.remote_profile])
+
+ self.dm.pushDir(profile.profile, self.app_ctx.remote_profile)
+
+ timeout = 5 # seconds
+ starttime = datetime.datetime.now()
+ while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout):
+ if self.dm.fileExists(self.app_ctx.remote_profiles_ini):
+ break
+ time.sleep(1)
+ else:
+ print "timed out waiting for profiles.ini"
+
+ local_profiles_ini = tempfile.NamedTemporaryFile()
+ self.dm.getFile(self.app_ctx.remote_profiles_ini, local_profiles_ini.name)
+
+ config = ProfileConfigParser()
+ config.read(local_profiles_ini.name)
+ for section in config.sections():
+ if 'Profile' in section:
+ config.set(section, 'IsRelative', 0)
+ config.set(section, 'Path', self.app_ctx.remote_profile)
+
+ new_profiles_ini = tempfile.NamedTemporaryFile()
+ config.write(open(new_profiles_ini.name, 'w'))
+
+ self.backup_file(self.app_ctx.remote_profiles_ini)
+ self.dm.pushFile(new_profiles_ini.name, self.app_ctx.remote_profiles_ini)
+
+ # Ideally all applications would read the profile the same way, but in practice
+ # this isn't true. Perform application specific profile-related setup if necessary.
+ if hasattr(self.app_ctx, 'setup_profile'):
+ for remote_path in self.app_ctx.remote_backup_files:
+ self.backup_file(remote_path)
+ self.app_ctx.setup_profile(profile)
+
+ def _get_online_devices(self):
+ return [d[0] for d in self.dm.devices()
+ if d[1] != 'offline'
+ if not d[0].startswith('emulator')]
+
+ def connect(self):
+ """
+ Connects to a running device. If no serial was specified in the
+ constructor, defaults to the first entry in `adb devices`.
+ """
+ if self.connected:
+ return
+
+ if self.serial:
+ serial = self.serial
+ else:
+ online_devices = self._get_online_devices()
+ if not online_devices:
+ raise IOError("No devices connected. Ensure the device is on and "
+ "remote debugging via adb is enabled in the settings.")
+ serial = online_devices[0]
+
+ self.dm._deviceSerial = serial
+ self.dm.connect()
+ self.connected = True
+
+ if self.logdir:
+ # save logcat
+ logcat_log = os.path.join(self.logdir, '%s.log' % serial)
+ if os.path.isfile(logcat_log):
+ self._rotate_log(logcat_log)
+ self.logcat_proc = self.start_logcat(serial, logfile=logcat_log)
+
+ def start_logcat(self, serial, logfile=None, stream=None, filterspec=None):
+ logcat_args = [self.app_ctx.adb, '-s', '%s' % serial,
+ 'logcat', '-v', 'time', '-b', 'main', '-b', 'radio']
+ # only log filterspec
+ if filterspec:
+ logcat_args.extend(['-s', filterspec])
+ process_args = {}
+ if logfile:
+ process_args['logfile'] = logfile
+ elif stream:
+ process_args['stream'] = stream
+ proc = ProcessHandler(logcat_args, **process_args)
+ proc.run()
+ return proc
+
+ def reboot(self):
+ """
+ Reboots the device via adb.
+ """
+ self.dm.reboot(wait=True)
+
+ def install_busybox(self, busybox):
+ """
+ Installs busybox on the device.
+
+ :param busybox: Path to busybox binary to install.
+ """
+ self.dm.remount()
+ print 'pushing %s' % self.app_ctx.remote_busybox
+ self.dm.pushFile(busybox, self.app_ctx.remote_busybox, retryLimit=10)
+ # TODO for some reason using dm.shellCheckOutput doesn't work,
+ # while calling adb shell directly does.
+ args = [self.app_ctx.adb, '-s', self.dm._deviceSerial,
+ 'shell', 'cd /system/bin; chmod 555 busybox;'
+ 'for x in `./busybox --list`; do ln -s ./busybox $x; done']
+ adb = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ adb.wait()
+ self.dm._verifyZip()
+
+ def wait_for_net(self):
+ active = False
+ time_out = 0
+ while not active and time_out < 40:
+ proc = subprocess.Popen([self.app_ctx.adb, 'shell', '/system/bin/netcfg'],
+ stdout=subprocess.PIPE)
+ proc.stdout.readline() # ignore first line
+ line = proc.stdout.readline()
+ while line != "":
+ if (re.search(r'UP\s+[1-9]\d{0,2}\.\d{1,3}\.\d{1,3}\.\d{1,3}', line)):
+ active = True
+ break
+ line = proc.stdout.readline()
+ time_out += 1
+ time.sleep(1)
+ return active
+
+ def wait_for_port(self, port, timeout=300):
+ starttime = datetime.datetime.now()
+ while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout):
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(('localhost', port))
+ data = sock.recv(16)
+ sock.close()
+ if ':' in data:
+ return True
+ except:
+ traceback.print_exc()
+ time.sleep(1)
+ return False
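+
+ # Illustrative usage (port number is an example): block until something on
+ # the forwarded local port replies with data containing ':'
+ #
+ # device.wait_for_port(2828, timeout=60)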
+
+ def backup_file(self, remote_path):
+ if not self.restore:
+ return
+
+ if self.dm.fileExists(remote_path) or self.dm.dirExists(remote_path):
+ self.dm.copyTree(remote_path, '%s.orig' % remote_path)
+ self.backup_files.add(remote_path)
+ else:
+ self.added_files.add(remote_path)
+
+ def cleanup(self):
+ """
+ Cleanup the device.
+ """
+ if not self.restore:
+ return
+
+ try:
+ self.dm._verifyDevice()
+ except DMError:
+ return
+
+ self.dm.remount()
+ # Restore the original profile
+ for added_file in self.added_files:
+ self.dm.removeFile(added_file)
+
+ for backup_file in self.backup_files:
+ if self.dm.fileExists('%s.orig' % backup_file) or \
+ self.dm.dirExists('%s.orig' % backup_file):
+ self.dm.moveTree('%s.orig' % backup_file, backup_file)
+
+ # Perform application specific profile cleanup if necessary
+ if hasattr(self.app_ctx, 'cleanup_profile'):
+ self.app_ctx.cleanup_profile()
+
+ # Remove the test profile
+ self.dm.removeDir(self.app_ctx.remote_profile)
+
+ def _rotate_log(self, srclog, index=1):
+ """
+ Rotate a logfile by recursively rotating logs further in the sequence,
+ deleting the last file if necessary.
+ """
+ basename = os.path.basename(srclog)
+ basename = basename[:-len('.log')]
+ if index > 1:
+ basename = basename[:-len('.1')]
+ basename = '%s.%d.log' % (basename, index)
+
+ destlog = os.path.join(self.logdir, basename)
+ if os.path.isfile(destlog):
+ if index == 3:
+ os.remove(destlog)
+ else:
+ self._rotate_log(destlog, index + 1)
+ shutil.move(srclog, destlog)
+
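+ # Illustrative effect of _rotate_log('emulator-5554.log'): an existing
+ # 'emulator-5554.2.log' is pushed to '.3' (overwriting any prior '.3'),
+ # '.1' is pushed to '.2', and the current log becomes 'emulator-5554.1.log',
+ # so at most three rotated logs are kept alongside the active one.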
+
+class ProfileConfigParser(RawConfigParser):
+ """
+ Class to create profiles.ini config files
+
+ Subclass of RawConfigParser that outputs .ini files in the exact
+ format expected for profiles.ini, which is slightly different
+ than the default format.
+ """
+
+ def optionxform(self, optionstr):
+ return optionstr
+
+ def write(self, fp):
+ if self._defaults:
+ fp.write("[%s]\n" % ConfigParser.DEFAULTSECT)
+ for (key, value) in self._defaults.items():
+ fp.write("%s=%s\n" % (key, str(value).replace('\n', '\n\t')))
+ fp.write("\n")
+ for section in self._sections:
+ fp.write("[%s]\n" % section)
+ for (key, value) in self._sections[section].items():
+ if key == "__name__":
+ continue
+ if (value is not None) or (self._optcre == self.OPTCRE):
+ key = "=".join((key, str(value).replace('\n', '\n\t')))
+ fp.write("%s\n" % (key))
+ fp.write("\n")
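
As a quick illustration of ProfileConfigParser (a minimal sketch; the section name and values are hypothetical), optionxform() preserves the case of keys and the overridden write() emits key=value pairs with no spaces around the equals sign, which is the profiles.ini layout the class docstring describes:

    parser = ProfileConfigParser()
    parser.add_section('Profile0')
    parser.set('Profile0', 'Name', 'mozrunner-test')
    parser.set('Profile0', 'IsRelative', '1')
    parser.set('Profile0', 'Path', 'profiles/mozrunner-test')
    with open('profiles.ini', 'w') as fp:
        parser.write(fp)
    # profiles.ini now contains:
    # [Profile0]
    # Name=mozrunner-test
    # IsRelative=1
    # Path=profiles/mozrunner-test
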
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/emulator.py b/testing/mozbase/mozrunner/mozrunner/devices/emulator.py
new file mode 100644
index 000000000..adeae27ed
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/emulator.py
@@ -0,0 +1,288 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from telnetlib import Telnet
+import datetime
+import os
+import shutil
+import subprocess
+import tempfile
+import time
+
+from mozprocess import ProcessHandler
+
+from .base import Device
+from .emulator_battery import EmulatorBattery
+from .emulator_geo import EmulatorGeo
+from .emulator_screen import EmulatorScreen
+from ..errors import TimeoutException
+
+
+class ArchContext(object):
+
+ def __init__(self, arch, context, binary=None, avd=None, extra_args=None):
+ homedir = getattr(context, 'homedir', '')
+ kernel = os.path.join(homedir, 'prebuilts', 'qemu-kernel', '%s', '%s')
+ sysdir = os.path.join(homedir, 'out', 'target', 'product', '%s')
+ self.extra_args = []
+ self.binary = os.path.join(context.bindir or '', 'emulator')
+ if arch == 'x86':
+ self.binary = os.path.join(context.bindir or '', 'emulator-x86')
+ self.kernel = kernel % ('x86', 'kernel-qemu')
+ self.sysdir = sysdir % 'generic_x86'
+ elif avd:
+ self.avd = avd
+ self.extra_args = [
+ '-show-kernel', '-debug',
+ 'init,console,gles,memcheck,adbserver,adbclient,adb,avd_config,socket'
+ ]
+ else:
+ self.kernel = kernel % ('arm', 'kernel-qemu-armv7')
+ self.sysdir = sysdir % 'generic'
+ self.extra_args = ['-cpu', 'cortex-a8']
+
+ if binary:
+ self.binary = binary
+
+ if extra_args:
+ self.extra_args.extend(extra_args)
+
+
+class SDCard(object):
+
+ def __init__(self, emulator, size):
+ self.emulator = emulator
+ self.path = self.create_sdcard(size)
+
+ def create_sdcard(self, sdcard_size):
+ """
+ Creates an sdcard partition in the emulator.
+
+ :param sdcard_size: Size of partition to create, e.g. '10MB'.
+ """
+ mksdcard = self.emulator.app_ctx.which('mksdcard')
+ path = tempfile.mktemp(prefix='sdcard', dir=self.emulator.tmpdir)
+ sdargs = [mksdcard, '-l', 'mySdCard', sdcard_size, path]
+ sd = subprocess.Popen(sdargs, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ retcode = sd.wait()
+ if retcode:
+ raise Exception('unable to create sdcard: exit code %d: %s'
+ % (retcode, sd.stdout.read()))
+ return path
+
+
+class BaseEmulator(Device):
+ port = None
+ proc = None
+ telnet = None
+
+ def __init__(self, app_ctx, **kwargs):
+ self.arch = ArchContext(kwargs.pop('arch', 'arm'), app_ctx,
+ binary=kwargs.pop('binary', None),
+ avd=kwargs.pop('avd', None))
+ super(BaseEmulator, self).__init__(app_ctx, **kwargs)
+ self.tmpdir = tempfile.mkdtemp()
+ # These rely on telnet
+ self.battery = EmulatorBattery(self)
+ self.geo = EmulatorGeo(self)
+ self.screen = EmulatorScreen(self)
+
+ @property
+ def args(self):
+ """
+ Arguments to pass into the emulator binary.
+ """
+ return [self.arch.binary]
+
+ def start(self):
+ """
+ Starts a new emulator.
+ """
+ if self.proc:
+ return
+
+ original_devices = set(self._get_online_devices())
+
+ # QEMU relies on atexit() to remove temporary files, which does not
+ # work since mozprocess uses SIGKILL to kill the emulator process.
+ # Use a customized temporary directory so we can clean it up.
+ os.environ['ANDROID_TMP'] = self.tmpdir
+
+ qemu_log = None
+ qemu_proc_args = {}
+ if self.logdir:
+ # save output from qemu to logfile
+ qemu_log = os.path.join(self.logdir, 'qemu.log')
+ if os.path.isfile(qemu_log):
+ self._rotate_log(qemu_log)
+ qemu_proc_args['logfile'] = qemu_log
+ else:
+ qemu_proc_args['processOutputLine'] = lambda line: None
+ self.proc = ProcessHandler(self.args, **qemu_proc_args)
+ self.proc.run()
+
+ devices = set(self._get_online_devices())
+ now = datetime.datetime.now()
+ while (devices - original_devices) == set([]):
+ time.sleep(1)
+ # Sometimes it takes more than 60s to launch emulator, so we
+ # increase timeout value to 180s. Please see bug 1143380.
+ if datetime.datetime.now() - now > datetime.timedelta(
+ seconds=180):
+ raise TimeoutException(
+ 'timed out waiting for emulator to start')
+ devices = set(self._get_online_devices())
+ devices = devices - original_devices
+ self.serial = devices.pop()
+ self.connect()
+
+ def _get_online_devices(self):
+ return [d[0] for d in self.dm.devices() if d[1] != 'offline' if
+ d[0].startswith('emulator')]
+
+ def connect(self):
+ """
+ Connects to a running device. If no serial was specified in the
+ constructor, defaults to the first entry in `adb devices`.
+ """
+ if self.connected:
+ return
+
+ super(BaseEmulator, self).connect()
+ serial = self.serial or self.dm._deviceSerial
+ self.port = int(serial[serial.rindex('-') + 1:])
+
+ def cleanup(self):
+ """
+ Cleans up and kills the emulator, if it was started by mozrunner.
+ """
+ super(BaseEmulator, self).cleanup()
+ if self.proc:
+ self.proc.kill()
+ self.proc = None
+ self.connected = False
+
+ # Remove temporary files
+ shutil.rmtree(self.tmpdir)
+
+ def _get_telnet_response(self, command=None):
+ output = []
+ assert self.telnet
+ if command is not None:
+ self.telnet.write('%s\n' % command)
+ while True:
+ line = self.telnet.read_until('\n')
+ output.append(line.rstrip())
+ if line.startswith('OK'):
+ return output
+ elif line.startswith('KO:'):
+ raise Exception('bad telnet response: %s' % line)
+
+ def _run_telnet(self, command):
+ if not self.telnet:
+ self.telnet = Telnet('localhost', self.port)
+ self._get_telnet_response()
+ return self._get_telnet_response(command)
+
+ def __del__(self):
+ if self.telnet:
+ self.telnet.write('exit\n')
+ self.telnet.read_all()
+
+
+class Emulator(BaseEmulator):
+
+ def __init__(self, app_ctx, arch, resolution=None, sdcard=None, userdata=None,
+ no_window=None, binary=None, **kwargs):
+ super(Emulator, self).__init__(app_ctx, arch=arch, binary=binary, **kwargs)
+
+ # emulator args
+ self.resolution = resolution or '320x480'
+ self._sdcard_size = sdcard
+ self._sdcard = None
+ self.userdata = tempfile.NamedTemporaryFile(prefix='userdata-qemu', dir=self.tmpdir)
+ self.initdata = userdata if userdata else os.path.join(self.arch.sysdir, 'userdata.img')
+ self.no_window = no_window
+
+ @property
+ def sdcard(self):
+ if self._sdcard_size and not self._sdcard:
+ self._sdcard = SDCard(self, self._sdcard_size).path
+
+ return self._sdcard
+
+ @property
+ def args(self):
+ """
+ Arguments to pass into the emulator binary.
+ """
+ qemu_args = super(Emulator, self).args
+ qemu_args.extend([
+ '-kernel', self.arch.kernel,
+ '-sysdir', self.arch.sysdir,
+ '-data', self.userdata.name,
+ '-initdata', self.initdata,
+ '-wipe-data'])
+ if self.no_window:
+ qemu_args.append('-no-window')
+ if self.sdcard:
+ qemu_args.extend(['-sdcard', self.sdcard])
+ qemu_args.extend(['-memory', '512',
+ '-partition-size', '512',
+ '-verbose',
+ '-skin', self.resolution,
+ '-gpu', 'on',
+ '-qemu'] + self.arch.extra_args)
+ return qemu_args
+
+ def connect(self):
+ """
+ Connects to a running device. If no serial was specified in the
+ constructor, defaults to the first entry in `adb devices`.
+ """
+ if self.connected:
+ return
+
+ super(Emulator, self).connect()
+ self.geo.set_default_location()
+ self.screen.initialize()
+
+ # setup DNS fix for networking
+ self.app_ctx.dm.shellCheckOutput(['setprop', 'net.dns1', '10.0.2.3'])
+
+ def cleanup(self):
+ """
+ Cleans up and kills the emulator, if it was started by mozrunner.
+ """
+ super(Emulator, self).cleanup()
+ # Remove temporary files
+ self.userdata.close()
+
+
+class EmulatorAVD(BaseEmulator):
+
+ def __init__(self, app_ctx, binary, avd, port=5554, **kwargs):
+ super(EmulatorAVD, self).__init__(app_ctx, binary=binary, avd=avd, **kwargs)
+ self.port = port
+
+ @property
+ def args(self):
+ """
+ Arguments to pass into the emulator binary.
+ """
+ qemu_args = super(EmulatorAVD, self).args
+ qemu_args.extend(['-avd', self.arch.avd,
+ '-port', str(self.port)])
+ qemu_args.extend(self.arch.extra_args)
+ return qemu_args
+
+ def start(self):
+ if self.proc:
+ return
+
+ env = os.environ
+ env['ANDROID_AVD_HOME'] = self.app_ctx.avd_home
+
+ super(EmulatorAVD, self).start()
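
The telnet helpers above follow the Android emulator console protocol: every command is answered by output lines that end with 'OK', or 'KO: <reason>' on failure, which is exactly what _get_telnet_response() checks for. A minimal sketch of driving the console once an emulator has started (the app_ctx object is assumed to be built elsewhere, and 'help' is just a generic console command used for illustration):

    emulator = Emulator(app_ctx, arch='arm')
    emulator.start()                      # boots qemu and derives the console port from the serial
    for line in emulator._run_telnet('help'):
        print line                        # the final line is 'OK'
    emulator.cleanup()                    # kills qemu and removes the temporary directory
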
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/emulator_battery.py b/testing/mozbase/mozrunner/mozrunner/devices/emulator_battery.py
new file mode 100644
index 000000000..6f389152b
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/emulator_battery.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class EmulatorBattery(object):
+
+ def __init__(self, emulator):
+ self.emulator = emulator
+
+ def get_state(self):
+ status = {}
+ state = {}
+
+ response = self.emulator._run_telnet('power display')
+ for line in response:
+ if ':' in line:
+ field, value = line.split(':')
+ value = value.strip()
+ if value == 'true':
+ value = True
+ elif value == 'false':
+ value = False
+ elif field == 'capacity':
+ value = float(value)
+ status[field] = value
+
+ state['level'] = status.get('capacity', 0.0) / 100
+ if status.get('AC') == 'online':
+ state['charging'] = True
+ else:
+ state['charging'] = False
+
+ return state
+
+ def get_charging(self):
+ return self.get_state()['charging']
+
+ def get_level(self):
+ return self.get_state()['level']
+
+ def set_level(self, level):
+ self.emulator._run_telnet('power capacity %d' % (level * 100))
+
+ def set_charging(self, charging):
+ if charging:
+ cmd = 'power ac on'
+ else:
+ cmd = 'power ac off'
+ self.emulator._run_telnet(cmd)
+
+ charging = property(get_charging, set_charging)
+ level = property(get_level, set_level)
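
Because charging and level are plain properties wrapping the telnet commands above, battery state can be scripted directly from a test (sketch; emulator stands for any started emulator instance):

    emulator.battery.level = 0.5          # sends 'power capacity 50'
    emulator.battery.charging = False     # sends 'power ac off'
    state = emulator.battery.get_state()  # parses 'power display', e.g. {'level': 0.5, 'charging': False}
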
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/emulator_geo.py b/testing/mozbase/mozrunner/mozrunner/devices/emulator_geo.py
new file mode 100644
index 000000000..a8ec0e089
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/emulator_geo.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class EmulatorGeo(object):
+
+ def __init__(self, emulator):
+ self.emulator = emulator
+
+ def set_default_location(self):
+ self.lon = -122.08769
+ self.lat = 37.41857
+ self.set_location(self.lon, self.lat)
+
+ def set_location(self, lon, lat):
+ self.emulator._run_telnet('geo fix %0.5f %0.5f' % (lon, lat))
diff --git a/testing/mozbase/mozrunner/mozrunner/devices/emulator_screen.py b/testing/mozbase/mozrunner/mozrunner/devices/emulator_screen.py
new file mode 100644
index 000000000..58bdda812
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/devices/emulator_screen.py
@@ -0,0 +1,89 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class EmulatorScreen(object):
+ """Class for screen related emulator commands."""
+
+ SO_PORTRAIT_PRIMARY = 'portrait-primary'
+ SO_PORTRAIT_SECONDARY = 'portrait-secondary'
+ SO_LANDSCAPE_PRIMARY = 'landscape-primary'
+ SO_LANDSCAPE_SECONDARY = 'landscape-secondary'
+
+ def __init__(self, emulator):
+ self.emulator = emulator
+
+ def initialize(self):
+ self.orientation = self.SO_PORTRAIT_PRIMARY
+
+ def _get_raw_orientation(self):
+ """Get the raw value of the current device orientation."""
+ response = self.emulator._run_telnet('sensor get orientation')
+
+ return response[0].split('=')[1].strip()
+
+ def _set_raw_orientation(self, data):
+ """Set the raw value of the specified device orientation."""
+ self.emulator._run_telnet('sensor set orientation %s' % data)
+
+ def get_orientation(self):
+ """Get the current device orientation.
+
+ Returns:
+ orientation -- Orientation of the device. One of:
+ SO_PORTRAIT_PRIMARY - system buttons at the bottom
+ SO_PORTRAIT_SECONDARY - system buttons at the top
+ SO_LANDSCAPE_PRIMARY - system buttons at the right
+ SO_LANDSCAPE_SECONDARY - system buttons at the left
+
+ """
+ data = self._get_raw_orientation()
+
+ if data == '0:-90:0':
+ orientation = self.SO_PORTRAIT_PRIMARY
+ elif data == '0:90:0':
+ orientation = self.SO_PORTRAIT_SECONDARY
+ elif data == '0:0:90':
+ orientation = self.SO_LANDSCAPE_PRIMARY
+ elif data == '0:0:-90':
+ orientation = self.SO_LANDSCAPE_SECONDARY
+ else:
+ raise ValueError('Unknown orientation sensor value: %s.' % data)
+
+ return orientation
+
+ def set_orientation(self, orientation):
+ """Set the specified device orientation.
+
+ Args:
+ orientation -- Orientation of the device. One of:
+ SO_PORTRAIT_PRIMARY - system buttons at the bottom
+ SO_PORTRAIT_SECONDARY - system buttons at the top
+ SO_LANDSCAPE_PRIMARY - system buttons at the right
+ SO_LANDSCAPE_SECONDARY - system buttons at the left
+ """
+ orientation = SCREEN_ORIENTATIONS[orientation]
+
+ if orientation == self.SO_PORTRAIT_PRIMARY:
+ data = '0:-90:0'
+ elif orientation == self.SO_PORTRAIT_SECONDARY:
+ data = '0:90:0'
+ elif orientation == self.SO_LANDSCAPE_PRIMARY:
+ data = '0:0:90'
+ elif orientation == self.SO_LANDSCAPE_SECONDARY:
+ data = '0:0:-90'
+ else:
+ raise ValueError('Invalid orientation: %s' % orientation)
+
+ self._set_raw_orientation(data)
+
+ orientation = property(get_orientation, set_orientation)
+
+
+SCREEN_ORIENTATIONS = {"portrait": EmulatorScreen.SO_PORTRAIT_PRIMARY,
+ "landscape": EmulatorScreen.SO_LANDSCAPE_PRIMARY,
+ "portrait-primary": EmulatorScreen.SO_PORTRAIT_PRIMARY,
+ "landscape-primary": EmulatorScreen.SO_LANDSCAPE_PRIMARY,
+ "portrait-secondary": EmulatorScreen.SO_PORTRAIT_SECONDARY,
+ "landscape-secondary": EmulatorScreen.SO_LANDSCAPE_SECONDARY}
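
The orientation property accepts either an SO_* constant or one of the shorter aliases in SCREEN_ORIENTATIONS, and both getting and setting go through the raw 'sensor' console command (sketch; emulator is a started emulator instance):

    screen = emulator.screen
    screen.orientation = 'landscape'       # maps to SO_LANDSCAPE_PRIMARY, sends 'sensor set orientation 0:0:90'
    assert screen.orientation == EmulatorScreen.SO_LANDSCAPE_PRIMARY
    screen.orientation = EmulatorScreen.SO_PORTRAIT_PRIMARY   # back to the default set by initialize()
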
diff --git a/testing/mozbase/mozrunner/mozrunner/errors.py b/testing/mozbase/mozrunner/mozrunner/errors.py
new file mode 100644
index 000000000..2c4ea50d5
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/errors.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class RunnerException(Exception):
+ """Base exception handler for mozrunner related errors"""
+
+
+class RunnerNotStartedError(RunnerException):
+ """Exception handler in case the runner hasn't been started"""
+
+
+class TimeoutException(RunnerException):
+ """Raised on timeout waiting for targets to start."""
diff --git a/testing/mozbase/mozrunner/mozrunner/resources/metrotestharness.exe b/testing/mozbase/mozrunner/mozrunner/resources/metrotestharness.exe
new file mode 100644
index 000000000..d3bcbfbee
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/resources/metrotestharness.exe
Binary files differ
diff --git a/testing/mozbase/mozrunner/mozrunner/runners.py b/testing/mozbase/mozrunner/mozrunner/runners.py
new file mode 100644
index 000000000..4d8e3e130
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/runners.py
@@ -0,0 +1,211 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+This module contains a set of shortcut methods that create runners for commonly
+used Mozilla applications, such as Firefox or the B2G emulator.
+"""
+
+from .application import get_app_context
+from .base import DeviceRunner, GeckoRuntimeRunner, FennecRunner
+from .devices import Emulator, EmulatorAVD, Device
+
+
+def Runner(*args, **kwargs):
+ """
+ Create a generic GeckoRuntime runner.
+
+ :param binary: Path to binary.
+ :param cmdargs: Arguments to pass into binary.
+ :param profile: Profile object to use.
+ :param env: Environment variables to pass into the gecko process.
+ :param clean_profile: If True, restores profile back to original state.
+ :param process_class: Class used to launch the binary.
+ :param process_args: Arguments to pass into process_class.
+ :param symbols_path: Path to symbol files used for crash analysis.
+ :param show_crash_reporter: allow the crash reporter window to pop up.
+ Defaults to False.
+ :returns: A generic GeckoRuntimeRunner.
+ """
+ return GeckoRuntimeRunner(*args, **kwargs)
+
+
+def FirefoxRunner(*args, **kwargs):
+ """
+ Create a desktop Firefox runner.
+
+ :param binary: Path to Firefox binary.
+ :param cmdargs: Arguments to pass into binary.
+ :param profile: Profile object to use.
+ :param env: Environment variables to pass into the gecko process.
+ :param clean_profile: If True, restores profile back to original state.
+ :param process_class: Class used to launch the binary.
+ :param process_args: Arguments to pass into process_class.
+ :param symbols_path: Path to symbol files used for crash analysis.
+ :param show_crash_reporter: allow the crash reporter window to pop up.
+ Defaults to False.
+ :returns: A GeckoRuntimeRunner for Firefox.
+ """
+ kwargs['app_ctx'] = get_app_context('firefox')()
+ return GeckoRuntimeRunner(*args, **kwargs)
+
+
+def ThunderbirdRunner(*args, **kwargs):
+ """
+ Create a desktop Thunderbird runner.
+
+ :param binary: Path to Thunderbird binary.
+ :param cmdargs: Arguments to pass into binary.
+ :param profile: Profile object to use.
+ :param env: Environment variables to pass into the gecko process.
+ :param clean_profile: If True, restores profile back to original state.
+ :param process_class: Class used to launch the binary.
+ :param process_args: Arguments to pass into process_class.
+ :param symbols_path: Path to symbol files used for crash analysis.
+ :param show_crash_reporter: allow the crash reporter window to pop up.
+ Defaults to False.
+ :returns: A GeckoRuntimeRunner for Thunderbird.
+ """
+ kwargs['app_ctx'] = get_app_context('thunderbird')()
+ return GeckoRuntimeRunner(*args, **kwargs)
+
+
+def B2GDesktopRunner(*args, **kwargs):
+ """
+ Create a B2G desktop runner.
+
+ :param binary: Path to b2g desktop binary.
+ :param cmdargs: Arguments to pass into binary.
+ :param profile: Profile object to use.
+ :param env: Environment variables to pass into the gecko process.
+ :param clean_profile: If True, restores profile back to original state.
+ :param process_class: Class used to launch the binary.
+ :param process_args: Arguments to pass into process_class.
+ :param symbols_path: Path to symbol files used for crash analysis.
+ :param show_crash_reporter: allow the crash reporter window to pop up.
+ Defaults to False.
+ :returns: A GeckoRuntimeRunner for b2g desktop.
+ """
+ # There is no difference between a generic and b2g desktop runner,
+ # but expose a separate entry point for clarity.
+ return Runner(*args, **kwargs)
+
+
+def FennecEmulatorRunner(avd='mozemulator-4.3',
+ adb_path=None,
+ avd_home=None,
+ logdir=None,
+ serial=None,
+ binary=None,
+ app='org.mozilla.fennec',
+ **kwargs):
+ """
+ Create a Fennec emulator runner. This can either start a new emulator
+ (which will use an avd), or connect to an already-running emulator.
+
+ :param avd: name of an AVD available in your environment.
+ Typically obtained via tooltool: either 'mozemulator-4.3' or 'mozemulator-x86'.
+ Defaults to 'mozemulator-4.3'
+ :param avd_home: Path to avd parent directory
+ :param logdir: Path to save logfiles such as logcat and qemu output.
+ :param serial: Serial of emulator to connect to as seen in `adb devices`.
+ Defaults to the first entry in `adb devices`.
+ :param binary: Path to emulator binary.
+ Defaults to None, which causes the device_class to guess based on PATH.
+ :param app: Name of Fennec app (often org.mozilla.fennec_$USER)
+ Defaults to 'org.mozilla.fennec'
+ :param cmdargs: Arguments to pass into binary.
+ :returns: A DeviceRunner for Android emulators.
+ """
+ kwargs['app_ctx'] = get_app_context('fennec')(app, adb_path=adb_path,
+ avd_home=avd_home)
+ device_args = {'app_ctx': kwargs['app_ctx'],
+ 'avd': avd,
+ 'binary': binary,
+ 'serial': serial,
+ 'logdir': logdir}
+ return FennecRunner(device_class=EmulatorAVD,
+ device_args=device_args,
+ **kwargs)
+
+
+def B2GEmulatorRunner(arch='arm',
+ b2g_home=None,
+ adb_path=None,
+ logdir=None,
+ binary=None,
+ no_window=None,
+ resolution=None,
+ sdcard=None,
+ userdata=None,
+ **kwargs):
+ """
+ Create a B2G emulator runner.
+
+ :param arch: The architecture of the emulator, either 'arm' or 'x86'. Defaults to 'arm'.
+ :param b2g_home: Path to root B2G repository.
+ :param logdir: Path to save logfiles such as logcat and qemu output.
+ :param no_window: Run emulator without a window.
+ :param resolution: Screen resolution to set the emulator to, e.g. '800x1000'.
+ :param sdcard: Path to local emulated sdcard storage.
+ :param userdata: Path to custom userdata image.
+ :param profile: Profile object to use.
+ :param env: Environment variables to pass into the b2g.sh process.
+ :param clean_profile: If True, restores profile back to original state.
+ :param process_class: Class used to launch the b2g.sh process.
+ :param process_args: Arguments to pass into the b2g.sh process.
+ :param symbols_path: Path to symbol files used for crash analysis.
+ :returns: A DeviceRunner for B2G emulators.
+ """
+ kwargs['app_ctx'] = get_app_context('b2g')(b2g_home, adb_path=adb_path)
+ device_args = {'app_ctx': kwargs['app_ctx'],
+ 'arch': arch,
+ 'binary': binary,
+ 'resolution': resolution,
+ 'sdcard': sdcard,
+ 'userdata': userdata,
+ 'no_window': no_window,
+ 'logdir': logdir}
+ return DeviceRunner(device_class=Emulator,
+ device_args=device_args,
+ **kwargs)
+
+
+def B2GDeviceRunner(b2g_home=None,
+ adb_path=None,
+ logdir=None,
+ serial=None,
+ **kwargs):
+ """
+ Create a B2G device runner.
+
+ :param b2g_home: Path to root B2G repository.
+ :param logdir: Path to save logfiles such as logcat.
+ :param serial: Serial of device to connect to as seen in `adb devices`.
+ :param profile: Profile object to use.
+ :param env: Environment variables to pass into the b2g.sh process.
+ :param clean_profile: If True, restores profile back to original state.
+ :param process_class: Class used to launch the b2g.sh process.
+ :param process_args: Arguments to pass into the b2g.sh process.
+ :param symbols_path: Path to symbol files used for crash analysis.
+ :returns: A DeviceRunner for B2G devices.
+ """
+ kwargs['app_ctx'] = get_app_context('b2g')(b2g_home, adb_path=adb_path)
+ device_args = {'app_ctx': kwargs['app_ctx'],
+ 'logdir': logdir,
+ 'serial': serial}
+ return DeviceRunner(device_class=Device,
+ device_args=device_args,
+ **kwargs)
+
+
+runners = {
+ 'default': Runner,
+ 'b2g_desktop': B2GDesktopRunner,
+ 'b2g_emulator': B2GEmulatorRunner,
+ 'b2g_device': B2GDeviceRunner,
+ 'firefox': FirefoxRunner,
+ 'thunderbird': ThunderbirdRunner,
+ 'fennec': FennecEmulatorRunner
+}
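
Taken together, these factories are the public entry points of mozrunner, and the unit tests further down use them in the same way. A desktop session reduces to a few lines (sketch; the binary path is a placeholder):

    import mozprofile
    import mozrunner

    profile = mozprofile.FirefoxProfile()
    runner = mozrunner.FirefoxRunner(binary='/path/to/firefox', profile=profile)
    runner.start()
    returncode = runner.wait(60)   # exit code, or None if the process is still running
    runner.stop()
    runner.cleanup()
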
diff --git a/testing/mozbase/mozrunner/mozrunner/utils.py b/testing/mozbase/mozrunner/mozrunner/utils.py
new file mode 100755
index 000000000..f96c94398
--- /dev/null
+++ b/testing/mozbase/mozrunner/mozrunner/utils.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Utility functions for mozrunner"""
+
+import mozinfo
+import os
+import sys
+
+__all__ = ['findInPath', 'get_metadata_from_egg']
+
+
+# python package method metadata by introspection
+try:
+ import pkg_resources
+
+ def get_metadata_from_egg(module):
+ ret = {}
+ try:
+ dist = pkg_resources.get_distribution(module)
+ except pkg_resources.DistributionNotFound:
+ return {}
+ if dist.has_metadata("PKG-INFO"):
+ key = None
+ value = ""
+ for line in dist.get_metadata("PKG-INFO").splitlines():
+ # see http://www.python.org/dev/peps/pep-0314/
+ if key == 'Description':
+ # descriptions can be long
+ if not line or line[0].isspace():
+ value += '\n' + line
+ continue
+ else:
+ key = key.strip()
+ value = value.strip()
+ ret[key] = value
+
+ key, value = line.split(':', 1)
+ key = key.strip()
+ value = value.strip()
+ ret[key] = value
+ if dist.has_metadata("requires.txt"):
+ ret["Dependencies"] = "\n" + dist.get_metadata("requires.txt")
+ return ret
+except ImportError:
+ # package resources not available
+ def get_metadata_from_egg(module):
+ return {}
+
+
+def findInPath(fileName, path=os.environ['PATH']):
+ """python equivalent of which; should really be in the stdlib"""
+ dirs = path.split(os.pathsep)
+ for dir in dirs:
+ if os.path.isfile(os.path.join(dir, fileName)):
+ return os.path.join(dir, fileName)
+ if mozinfo.isWin:
+ if os.path.isfile(os.path.join(dir, fileName + ".exe")):
+ return os.path.join(dir, fileName + ".exe")
+
+if __name__ == '__main__':
+ for i in sys.argv[1:]:
+ print findInPath(i)
+
+
+def _find_marionette_in_args(*args, **kwargs):
+ try:
+ m = [a for a in args + tuple(kwargs.values()) if hasattr(a, 'session')][0]
+ except IndexError:
+ print("Can only apply decorator to function using a marionette object")
+ raise
+ return m
+
+
+def _raw_log():
+ import logging
+ return logging.getLogger(__name__)
+
+
+def test_environment(xrePath, env=None, crashreporter=True, debugger=False,
+ dmdPath=None, lsanPath=None, log=None):
+ """
+ Populate OS environment variables for mochitest and reftests.
+
+ Originally comes from automationutils.py; don't use that module for new code.
+ """
+
+ env = os.environ.copy() if env is None else env
+ log = log or _raw_log()
+
+ assert os.path.isabs(xrePath)
+
+ if mozinfo.isMac:
+ ldLibraryPath = os.path.join(os.path.dirname(xrePath), "MacOS")
+ else:
+ ldLibraryPath = xrePath
+
+ envVar = None
+ dmdLibrary = None
+ preloadEnvVar = None
+ if 'toolkit' in mozinfo.info and mozinfo.info['toolkit'] == "gonk":
+ # Skip all of this, it's only valid for the host.
+ pass
+ elif mozinfo.isUnix:
+ envVar = "LD_LIBRARY_PATH"
+ env['MOZILLA_FIVE_HOME'] = xrePath
+ dmdLibrary = "libdmd.so"
+ preloadEnvVar = "LD_PRELOAD"
+ elif mozinfo.isMac:
+ envVar = "DYLD_LIBRARY_PATH"
+ dmdLibrary = "libdmd.dylib"
+ preloadEnvVar = "DYLD_INSERT_LIBRARIES"
+ elif mozinfo.isWin:
+ envVar = "PATH"
+ dmdLibrary = "dmd.dll"
+ preloadEnvVar = "MOZ_REPLACE_MALLOC_LIB"
+ if envVar:
+ envValue = ((env.get(envVar), str(ldLibraryPath))
+ if mozinfo.isWin
+ else (ldLibraryPath, dmdPath, env.get(envVar)))
+ env[envVar] = os.path.pathsep.join([path for path in envValue if path])
+
+ if dmdPath and dmdLibrary and preloadEnvVar:
+ env[preloadEnvVar] = os.path.join(dmdPath, dmdLibrary)
+
+ # crashreporter
+ env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
+ env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
+
+ if crashreporter and not debugger:
+ env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
+ env['MOZ_CRASHREPORTER'] = '1'
+ else:
+ env['MOZ_CRASHREPORTER_DISABLE'] = '1'
+
+ # Crash on non-local network connections by default.
+ # MOZ_DISABLE_NONLOCAL_CONNECTIONS can be set to "0" to temporarily
+ # enable non-local connections for the purposes of local testing. Don't
+ # override the user's choice here. See bug 1049688.
+ env.setdefault('MOZ_DISABLE_NONLOCAL_CONNECTIONS', '1')
+
+ # Set WebRTC logging in case it is not set yet
+ env.setdefault(
+ 'MOZ_LOG',
+ 'signaling:3,mtransport:4,DataChannel:4,jsep:4,MediaPipelineFactory:4'
+ )
+ env.setdefault('R_LOG_LEVEL', '6')
+ env.setdefault('R_LOG_DESTINATION', 'stderr')
+ env.setdefault('R_LOG_VERBOSE', '1')
+
+ # ASan specific environment stuff
+ asan = bool(mozinfo.info.get("asan"))
+ if asan and (mozinfo.isLinux or mozinfo.isMac):
+ try:
+ # Symbolizer support
+ llvmsym = os.path.join(xrePath, "llvm-symbolizer")
+ if os.path.isfile(llvmsym):
+ env["ASAN_SYMBOLIZER_PATH"] = llvmsym
+ log.info("INFO | runtests.py | ASan using symbolizer at %s"
+ % llvmsym)
+ else:
+ log.info("TEST-UNEXPECTED-FAIL | runtests.py | Failed to find"
+ " ASan symbolizer at %s" % llvmsym)
+
+ # Returns total system memory in kilobytes.
+ # Works only on unix-like platforms where `free` is in the path.
+ totalMemory = int(os.popen("free").readlines()[1].split()[1])
+
+ # Only 4 GB RAM or less available? Use custom ASan options to reduce
+ the amount of resources required to run the tests. Standard options
+ # will otherwise lead to OOM conditions on the current test slaves.
+ message = "INFO | runtests.py | ASan running in %s configuration"
+ asanOptions = []
+ if totalMemory <= 1024 * 1024 * 4:
+ message = message % 'low-memory'
+ asanOptions = [
+ 'quarantine_size=50331648', 'malloc_context_size=5']
+ else:
+ message = message % 'default memory'
+
+ if lsanPath:
+ log.info("LSan enabled.")
+ asanOptions.append('detect_leaks=1')
+ lsanOptions = ["exitcode=0"]
+ # Uncomment the next line to report the addresses of leaked objects.
+ # lsanOptions.append("report_objects=1")
+ suppressionsFile = os.path.join(
+ lsanPath, 'lsan_suppressions.txt')
+ if os.path.exists(suppressionsFile):
+ log.info("LSan using suppression file " + suppressionsFile)
+ lsanOptions.append("suppressions=" + suppressionsFile)
+ else:
+ log.info("WARNING | runtests.py | LSan suppressions file"
+ " does not exist! " + suppressionsFile)
+ env["LSAN_OPTIONS"] = ':'.join(lsanOptions)
+
+ if len(asanOptions):
+ env['ASAN_OPTIONS'] = ':'.join(asanOptions)
+
+ except OSError as err:
+ log.info("Failed to determine available memory, disabling ASan"
+ " low-memory configuration: %s" % err.strerror)
+ except:
+ log.info("Failed to determine available memory, disabling ASan"
+ " low-memory configuration")
+ else:
+ log.info(message)
+
+ tsan = bool(mozinfo.info.get("tsan"))
+ if tsan and mozinfo.isLinux:
+ # Symbolizer support.
+ llvmsym = os.path.join(xrePath, "llvm-symbolizer")
+ if os.path.isfile(llvmsym):
+ env["TSAN_OPTIONS"] = "external_symbolizer_path=%s" % llvmsym
+ log.info("INFO | runtests.py | TSan using symbolizer at %s"
+ % llvmsym)
+ else:
+ log.info("TEST-UNEXPECTED-FAIL | runtests.py | Failed to find TSan"
+ " symbolizer at %s" % llvmsym)
+
+ return env
+
+
+def get_stack_fixer_function(utilityPath, symbolsPath):
+ """
+ Return a stack fixing function, if possible, to use on output lines.
+
+ A stack fixing function checks if a line conforms to the output from
+ MozFormatCodeAddressDetails. If the line does not, the line is returned
+ unchanged. If the line does, an attempt is made to convert the
+ file+offset into something human-readable (e.g. a function name).
+ """
+ if not mozinfo.info.get('debug'):
+ return None
+
+ def import_stack_fixer_module(module_name):
+ sys.path.insert(0, utilityPath)
+ module = __import__(module_name, globals(), locals(), [])
+ sys.path.pop(0)
+ return module
+
+ if symbolsPath and os.path.exists(symbolsPath):
+ # Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad
+ # symbol files).
+ # This method is preferred for Tinderbox builds, since native
+ # symbols may have been stripped.
+ stack_fixer_module = import_stack_fixer_module(
+ 'fix_stack_using_bpsyms')
+
+ def stack_fixer_function(line):
+ return stack_fixer_module.fixSymbols(line, symbolsPath)
+
+ elif mozinfo.isMac:
+ # Run each line through fix_macosx_stack.py (uses atos).
+ # This method is preferred for developer machines, so we don't
+ # have to run "make buildsymbols".
+ stack_fixer_module = import_stack_fixer_module(
+ 'fix_macosx_stack')
+
+ def stack_fixer_function(line):
+ return stack_fixer_module.fixSymbols(line)
+
+ elif mozinfo.isLinux:
+ # Run each line through fix_linux_stack.py (uses addr2line).
+ # This method is preferred for developer machines, so we don't
+ # have to run "make buildsymbols".
+ stack_fixer_module = import_stack_fixer_module(
+ 'fix_linux_stack')
+
+ def stack_fixer_function(line):
+ return stack_fixer_module.fixSymbols(line)
+
+ else:
+ return None
+
+ return stack_fixer_function
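
A sketch of how a harness might combine the two helpers above (all paths are placeholders; test_environment() requires an absolute xrePath, and get_stack_fixer_function() returns None for non-debug builds):

    env = test_environment(xrePath='/abs/path/to/xre')
    fix_stack = get_stack_fixer_function('/abs/path/to/utility', '/abs/path/to/symbols')

    def process_output_line(line):
        # pass process output through the stack fixer when one is available
        return fix_stack(line) if fix_stack else line
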
diff --git a/testing/mozbase/mozrunner/setup.py b/testing/mozbase/mozrunner/setup.py
new file mode 100644
index 000000000..23ffe88de
--- /dev/null
+++ b/testing/mozbase/mozrunner/setup.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from setuptools import setup, find_packages
+
+PACKAGE_NAME = 'mozrunner'
+PACKAGE_VERSION = '6.13'
+
+desc = """Reliable start/stop/configuration of Mozilla Applications (Firefox, Thunderbird, etc.)"""
+
+deps = ['mozdevice >= 0.37',
+ 'mozfile >= 1.0',
+ 'mozinfo >= 0.7',
+ 'mozlog >= 3.0',
+ 'mozprocess >= 0.23',
+ 'mozprofile >= 0.18',
+ ]
+
+EXTRAS_REQUIRE = {'crash': ['mozcrash >= 1.0']}
+
+# we only support python 2 right now
+assert sys.version_info[0] == 2
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description=desc,
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=['Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ ],
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL 2.0',
+ packages=find_packages(),
+ package_data={'mozrunner': [
+ 'resources/metrotestharness.exe'
+ ]},
+ zip_safe=False,
+ install_requires=deps,
+ extras_require=EXTRAS_REQUIRE,
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozrunner = mozrunner:cli
+ """)
diff --git a/testing/mozbase/mozrunner/tests/manifest.ini b/testing/mozbase/mozrunner/tests/manifest.ini
new file mode 100644
index 000000000..62af8fb30
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/manifest.ini
@@ -0,0 +1,7 @@
+[test_crash.py]
+[test_interactive.py]
+[test_start.py]
+[test_states.py]
+[test_stop.py]
+[test_threads.py]
+[test_wait.py]
diff --git a/testing/mozbase/mozrunner/tests/mozrunnertest.py b/testing/mozbase/mozrunner/tests/mozrunnertest.py
new file mode 100644
index 000000000..33f51031f
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/mozrunnertest.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozprofile
+import mozrunner
+
+
+@unittest.skipIf(not os.environ.get('BROWSER_PATH'),
+ 'No binary has been specified.')
+class MozrunnerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.pids = []
+ self.threads = []
+
+ self.profile = mozprofile.FirefoxProfile()
+ self.runner = mozrunner.FirefoxRunner(os.environ['BROWSER_PATH'],
+ profile=self.profile)
+
+ def tearDown(self):
+ for thread in self.threads:
+ thread.join()
+
+ self.runner.cleanup()
+
+ # Clean up any leftover, still-running processes
+ for pid in self.pids:
+ # TODO: Bug 925408
+ # mozprocess is not yet able to kill specific processes
+ pass
diff --git a/testing/mozbase/mozrunner/tests/test_crash.py b/testing/mozbase/mozrunner/tests/test_crash.py
new file mode 100644
index 000000000..455fc5f72
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_crash.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mock
+
+import mozrunnertest
+
+
+class MozrunnerCrashTestCase(mozrunnertest.MozrunnerTestCase):
+
+ @mock.patch('mozcrash.log_crashes', return_value=2)
+ def test_crash_count_with_logger(self, log_crashes):
+ self.assertEqual(self.runner.crashed, 0)
+ self.assertEqual(self.runner.check_for_crashes(), 2)
+ self.assertEqual(self.runner.crashed, 2)
+ self.assertEqual(self.runner.check_for_crashes(), 2)
+ self.assertEqual(self.runner.crashed, 4)
+
+ log_crashes.return_value = 0
+ self.assertEqual(self.runner.check_for_crashes(), 0)
+ self.assertEqual(self.runner.crashed, 4)
+
+ @mock.patch('mozcrash.check_for_crashes', return_value=2)
+ def test_crash_count_without_logger(self, check_for_crashes):
+ self.runner.logger = None
+
+ self.assertEqual(self.runner.crashed, 0)
+ self.assertEqual(self.runner.check_for_crashes(), 2)
+ self.assertEqual(self.runner.crashed, 2)
+ self.assertEqual(self.runner.check_for_crashes(), 2)
+ self.assertEqual(self.runner.crashed, 4)
+
+ check_for_crashes.return_value = 0
+ self.assertEqual(self.runner.check_for_crashes(), 0)
+ self.assertEqual(self.runner.crashed, 4)
diff --git a/testing/mozbase/mozrunner/tests/test_interactive.py b/testing/mozbase/mozrunner/tests/test_interactive.py
new file mode 100644
index 000000000..fe83bf80e
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_interactive.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+import threading
+from time import sleep
+
+import mozrunnertest
+
+
+class RunnerThread(threading.Thread):
+
+ def __init__(self, runner, timeout=10):
+ threading.Thread.__init__(self)
+ self.runner = runner
+ self.timeout = timeout
+
+ def run(self):
+ sleep(self.timeout)
+ self.runner.stop()
+
+
+class MozrunnerInteractiveTestCase(mozrunnertest.MozrunnerTestCase):
+
+ def test_run_interactive(self):
+ """Bug 965183: Run process in interactive mode and call wait()"""
+ pid = self.runner.start(interactive=True)
+ self.pids.append(pid)
+
+ thread = RunnerThread(self.runner, 5)
+ self.threads.append(thread)
+ thread.start()
+
+ # This is a blocking call. So the process should be killed by the thread
+ self.runner.wait()
+ thread.join()
+ self.assertFalse(self.runner.is_running())
+
+ def test_stop_interactive(self):
+ """Bug 965183: Explicitly stop the process in interactive mode"""
+ pid = self.runner.start(interactive=True)
+ self.pids.append(pid)
+
+ self.runner.stop()
+
+ def test_wait_after_process_finished(self):
+ """Waiting after the process has been stopped should not raise an error"""
+ self.runner.start(interactive=True)
+ sleep(5)
+ self.runner.process_handler.kill()
+
+ returncode = self.runner.wait(1)
+
+ self.assertNotIn(returncode, [None, 0])
+ self.assertIsNotNone(self.runner.process_handler)
diff --git a/testing/mozbase/mozrunner/tests/test_start.py b/testing/mozbase/mozrunner/tests/test_start.py
new file mode 100644
index 000000000..396584e00
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_start.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+from time import sleep
+
+import mozrunnertest
+
+
+class MozrunnerStartTestCase(mozrunnertest.MozrunnerTestCase):
+
+ def test_start_process(self):
+ """Start the process and test properties"""
+ self.assertIsNone(self.runner.process_handler)
+
+ self.runner.start()
+
+ self.assertTrue(self.runner.is_running())
+ self.assertIsNotNone(self.runner.process_handler)
+
+ def test_start_process_called_twice(self):
+ """Start the process twice and test that the first process is gone"""
+ self.runner.start()
+ # Bug 925480
+ # Make a copy until mozprocess can kill a specific process
+ process_handler = self.runner.process_handler
+
+ self.runner.start()
+
+ try:
+ self.assertNotIn(process_handler.wait(1), [None, 0])
+ finally:
+ process_handler.kill()
+
+ def test_start_with_timeout(self):
+ """Start the process and set a timeout"""
+ self.runner.start(timeout=2)
+ sleep(5)
+
+ self.assertFalse(self.runner.is_running())
+
+ def test_start_with_outputTimeout(self):
+ """Start the process and set an output timeout"""
+ self.runner.start(outputTimeout=2)
+ sleep(15)
+
+ self.assertFalse(self.runner.is_running())
diff --git a/testing/mozbase/mozrunner/tests/test_states.py b/testing/mozbase/mozrunner/tests/test_states.py
new file mode 100644
index 000000000..865e12263
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_states.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+import mozrunner
+
+import mozrunnertest
+
+
+class MozrunnerStatesTestCase(mozrunnertest.MozrunnerTestCase):
+
+ def test_errors_before_start(self):
+ """Bug 965714: Not started errors before start() is called"""
+
+ def test_returncode():
+ return self.runner.returncode
+
+ self.assertRaises(mozrunner.RunnerNotStartedError, self.runner.is_running)
+ self.assertRaises(mozrunner.RunnerNotStartedError, test_returncode)
+ self.assertRaises(mozrunner.RunnerNotStartedError, self.runner.wait)
diff --git a/testing/mozbase/mozrunner/tests/test_stop.py b/testing/mozbase/mozrunner/tests/test_stop.py
new file mode 100644
index 000000000..102d57a4e
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_stop.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import signal
+
+import mozrunnertest
+
+
+class MozrunnerStopTestCase(mozrunnertest.MozrunnerTestCase):
+
+ def test_stop_process(self):
+ """Stop the process and test properties"""
+ self.runner.start()
+ returncode = self.runner.stop()
+
+ self.assertFalse(self.runner.is_running())
+ self.assertNotIn(returncode, [None, 0])
+ self.assertEqual(self.runner.returncode, returncode)
+ self.assertIsNotNone(self.runner.process_handler)
+
+ self.assertEqual(self.runner.wait(1), returncode)
+
+ def test_stop_before_start(self):
+ """Stopping the process before it has been started should not raise an error"""
+ self.runner.stop()
+
+ def test_stop_process_custom_signal(self):
+ """Stop the process via a custom signal and test properties"""
+ self.runner.start()
+ returncode = self.runner.stop(signal.SIGTERM)
+
+ self.assertFalse(self.runner.is_running())
+ self.assertNotIn(returncode, [None, 0])
+ self.assertEqual(self.runner.returncode, returncode)
+ self.assertIsNotNone(self.runner.process_handler)
+
+ self.assertEqual(self.runner.wait(1), returncode)
diff --git a/testing/mozbase/mozrunner/tests/test_threads.py b/testing/mozbase/mozrunner/tests/test_threads.py
new file mode 100644
index 000000000..4b9b4cfc3
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_threads.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import threading
+from time import sleep
+
+import mozrunnertest
+
+
+class RunnerThread(threading.Thread):
+
+ def __init__(self, runner, do_start, timeout=10):
+ threading.Thread.__init__(self)
+ self.runner = runner
+ self.timeout = timeout
+ self.do_start = do_start
+
+ def run(self):
+ sleep(self.timeout)
+ if self.do_start:
+ self.runner.start()
+ else:
+ self.runner.stop()
+
+
+class MozrunnerThreadsTestCase(mozrunnertest.MozrunnerTestCase):
+
+ def test_process_start_via_thread(self):
+ """Start the runner via a thread"""
+ thread = RunnerThread(self.runner, True, 2)
+ self.threads.append(thread)
+
+ thread.start()
+ thread.join()
+
+ self.assertTrue(self.runner.is_running())
+
+ def test_process_stop_via_multiple_threads(self):
+ """Stop the runner via multiple threads"""
+ self.runner.start()
+ for i in range(5):
+ thread = RunnerThread(self.runner, False, 5)
+ self.threads.append(thread)
+ thread.start()
+
+ # Wait until the process has been stopped by another thread
+ for thread in self.threads:
+ thread.join()
+ returncode = self.runner.wait(2)
+
+ self.assertNotIn(returncode, [None, 0])
+ self.assertEqual(self.runner.returncode, returncode)
+ self.assertIsNotNone(self.runner.process_handler)
+ self.assertEqual(self.runner.wait(10), returncode)
+
+ def test_process_post_stop_via_thread(self):
+ """Stop the runner, then have a thread try to stop it again a bit later"""
+ self.runner.start()
+ thread = RunnerThread(self.runner, False, 5)
+ self.threads.append(thread)
+ thread.start()
+
+ # Wait a bit so that the application gets started
+ self.runner.wait(2)
+ returncode = self.runner.stop()
+ thread.join()
+
+ self.assertNotIn(returncode, [None, 0])
+ self.assertEqual(self.runner.returncode, returncode)
+ self.assertIsNotNone(self.runner.process_handler)
+ self.assertEqual(self.runner.wait(10), returncode)
diff --git a/testing/mozbase/mozrunner/tests/test_wait.py b/testing/mozbase/mozrunner/tests/test_wait.py
new file mode 100644
index 000000000..8da1efc3c
--- /dev/null
+++ b/testing/mozbase/mozrunner/tests/test_wait.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozrunnertest
+
+
+class MozrunnerWaitTestCase(mozrunnertest.MozrunnerTestCase):
+
+ def test_wait_while_running(self):
+ """Wait for the process while it is running"""
+ self.runner.start()
+ returncode = self.runner.wait(1)
+
+ self.assertTrue(self.runner.is_running())
+ self.assertEqual(returncode, None)
+ self.assertEqual(self.runner.returncode, returncode)
+ self.assertIsNotNone(self.runner.process_handler)
+
+ def test_wait_after_process_finished(self):
+ """Bug 965714: wait() after stop should not raise an error"""
+ self.runner.start()
+ self.runner.process_handler.kill()
+
+ returncode = self.runner.wait(1)
+
+ self.assertNotIn(returncode, [None, 0])
+ self.assertIsNotNone(self.runner.process_handler)
diff --git a/testing/mozbase/mozscreenshot/mozscreenshot/__init__.py b/testing/mozbase/mozscreenshot/mozscreenshot/__init__.py
new file mode 100644
index 000000000..56c62cb23
--- /dev/null
+++ b/testing/mozbase/mozscreenshot/mozscreenshot/__init__.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import mozinfo
+import tempfile
+import subprocess
+from mozlog.formatters.process import strstatus
+
+
+def printstatus(name, returncode):
+ """
+ Print the status of a command exit code, formatted for tbpl.
+
+ Note that the mozlog structured action "process_exit" should be used
+ instead in new code.
+ """
+ print "TEST-INFO | %s: %s" % (name, strstatus(returncode))
+
+
+def dump_screen(utilityPath, log):
+ """dumps a screenshot of the entire screen to a directory specified by
+ the MOZ_UPLOAD_DIR environment variable"""
+
+ is_structured_log = hasattr(log, 'process_exit')
+
+ # Need to figure out which OS-dependent tool to use
+ if mozinfo.isUnix:
+ utility = [os.path.join(utilityPath, "screentopng")]
+ utilityname = "screentopng"
+ elif mozinfo.isMac:
+ utility = ['/usr/sbin/screencapture', '-C', '-x', '-t', 'png']
+ utilityname = "screencapture"
+ elif mozinfo.isWin:
+ utility = [os.path.join(utilityPath, "screenshot.exe")]
+ utilityname = "screenshot"
+
+ # Get dir where to write the screenshot file
+ parent_dir = os.environ.get('MOZ_UPLOAD_DIR', None)
+ if not parent_dir:
+ log.info('Failed to retrieve MOZ_UPLOAD_DIR env var')
+ return
+
+ # Run the capture
+ try:
+ tmpfd, imgfilename = tempfile.mkstemp(
+ prefix='mozilla-test-fail-screenshot_',
+ suffix='.png', dir=parent_dir
+ )
+ os.close(tmpfd)
+ if is_structured_log:
+ log.process_start(utilityname)
+ returncode = subprocess.call(utility + [imgfilename])
+ if is_structured_log:
+ log.process_exit(utilityname, returncode)
+ else:
+ printstatus(utilityname, returncode)
+ except OSError, err:
+ log.info("Failed to start %s for screenshot: %s"
+ % (utility[0], err.strerror))
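
dump_screen() only needs the directory holding the per-platform capture tool plus a logger; MOZ_UPLOAD_DIR decides where the PNG ends up. A sketch using a mozlog structured logger, which should provide the process_exit() method the hasattr() check above looks for (paths are placeholders):

    import os
    from mozlog.structuredlog import StructuredLogger
    from mozscreenshot import dump_screen

    os.environ['MOZ_UPLOAD_DIR'] = '/tmp/artifacts'
    log = StructuredLogger('screenshot-example')
    dump_screen('/abs/path/to/utility', log)   # writes mozilla-test-fail-screenshot_*.png
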
diff --git a/testing/mozbase/mozscreenshot/setup.py b/testing/mozbase/mozscreenshot/setup.py
new file mode 100644
index 000000000..fbc147462
--- /dev/null
+++ b/testing/mozbase/mozscreenshot/setup.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+
+PACKAGE_NAME = 'mozscreenshot'
+PACKAGE_VERSION = '0.1'
+
+
+setup(
+ name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Library for taking screenshots in a test harness",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozscreenshot'],
+ zip_safe=False,
+ install_requires=['mozlog', 'mozinfo'],
+)
diff --git a/testing/mozbase/mozsystemmonitor/README.rst b/testing/mozbase/mozsystemmonitor/README.rst
new file mode 100644
index 000000000..65756d44d
--- /dev/null
+++ b/testing/mozbase/mozsystemmonitor/README.rst
@@ -0,0 +1,13 @@
+================
+mozsystemmonitor
+================
+
+mozsystemmonitor contains modules for monitoring a running system.
+
+SystemResourceMonitor
+=====================
+
+mozsystemmonitor.resourcemonitor.SystemResourceMonitor is a class used to
+measure system resource usage. It is useful for getting a handle on what the
+overall system is doing.
+
diff --git a/testing/mozbase/mozsystemmonitor/mozsystemmonitor/__init__.py b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/__init__.py
diff --git a/testing/mozbase/mozsystemmonitor/mozsystemmonitor/resourcemonitor.py b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/resourcemonitor.py
new file mode 100644
index 000000000..8e908bf43
--- /dev/null
+++ b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/resourcemonitor.py
@@ -0,0 +1,676 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from contextlib import contextmanager
+import multiprocessing
+import sys
+import time
+import warnings
+
+from collections import (
+ OrderedDict,
+ namedtuple,
+)
+
+
+class PsutilStub(object):
+
+ def __init__(self):
+ self.sswap = namedtuple('sswap', ['total', 'used', 'free', 'percent', 'sin',
+ 'sout'])
+ self.sdiskio = namedtuple('sdiskio', ['read_count', 'write_count',
+ 'read_bytes', 'write_bytes',
+ 'read_time', 'write_time'])
+ self.pcputimes = namedtuple('pcputimes', ['user', 'system'])
+ self.svmem = namedtuple(
+ 'svmem', ['total', 'available', 'percent', 'used', 'free',
+ 'active', 'inactive', 'buffers', 'cached'])
+
+ def cpu_percent(self, a, b):
+ return [0]
+
+ def cpu_times(self, percpu):
+ if percpu:
+ return [self.pcputimes(0, 0)]
+ else:
+ return self.pcputimes(0, 0)
+
+ def disk_io_counters(self):
+ return self.sdiskio(0, 0, 0, 0, 0, 0)
+
+ def swap_memory(self):
+ return self.sswap(0, 0, 0, 0, 0, 0)
+
+ def virtual_memory(self):
+ return self.svmem(0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+
+# psutil will raise NotImplementedError if the platform is not supported.
+try:
+ import psutil
+ have_psutil = True
+except Exception:
+ try:
+ # The PsutilStub should get us time intervals, at least
+ psutil = PsutilStub()
+ except Exception:
+ psutil = None
+
+ have_psutil = False
+
+
+def get_disk_io_counters():
+ try:
+ io_counters = psutil.disk_io_counters()
+ except RuntimeError:
+ io_counters = []
+
+ return io_counters
+
+
+def _collect(pipe, poll_interval):
+ """Collects system metrics.
+
+ This is the main function for the background process. It collects
+ data then forwards it on a pipe until told to stop.
+ """
+
+ data = []
+
+ # Establish initial values.
+
+ # We should ideally use a monotonic clock. However, Python 2.7 doesn't
+ # make a monotonic clock available on all platforms. Python 3.3 does!
+ last_time = time.time()
+ io_last = get_disk_io_counters()
+ cpu_last = psutil.cpu_times(True)
+ swap_last = psutil.swap_memory()
+ psutil.cpu_percent(None, True)
+
+ sin_index = swap_last._fields.index('sin')
+ sout_index = swap_last._fields.index('sout')
+
+ sleep_interval = poll_interval
+
+ while not pipe.poll(sleep_interval):
+ io = get_disk_io_counters()
+ cpu_times = psutil.cpu_times(True)
+ cpu_percent = psutil.cpu_percent(None, True)
+ virt_mem = psutil.virtual_memory()
+ swap_mem = psutil.swap_memory()
+ measured_end_time = time.time()
+
+ # TODO Does this wrap? At 32 bits? At 64 bits?
+ # TODO Consider patching "delta" API to upstream.
+ io_diff = [v - io_last[i] for i, v in enumerate(io)]
+ io_last = io
+
+ cpu_diff = []
+ for core, values in enumerate(cpu_times):
+ cpu_diff.append([v - cpu_last[core][i] for i, v in
+ enumerate(values)])
+
+ cpu_last = cpu_times
+
+ swap_entry = list(swap_mem)
+ swap_entry[sin_index] = swap_mem.sin - swap_last.sin
+ swap_entry[sout_index] = swap_mem.sout - swap_last.sout
+ swap_last = swap_mem
+
+ data.append((last_time, measured_end_time, io_diff, cpu_diff,
+ cpu_percent, list(virt_mem), swap_entry))
+
+ collection_overhead = time.time() - last_time - poll_interval
+ last_time = measured_end_time
+ sleep_interval = max(0, poll_interval - collection_overhead)
+
+ for entry in data:
+ pipe.send(entry)
+
+ pipe.send(('done', None, None, None, None, None, None))
+ pipe.close()
+ sys.exit(0)
+
+
+SystemResourceUsage = namedtuple('SystemResourceUsage',
+ ['start', 'end',
+ 'cpu_times', 'cpu_percent', 'io', 'virt', 'swap'])
+
+
+class SystemResourceMonitor(object):
+ """Measures system resources.
+
+ Each instance measures system resources from the time it is started
+ until it is finished. It does this on a separate process so it doesn't
+ impact execution of the main Python process.
+
+ Each instance is one-shot: it cannot be used to record multiple
+ durations.
+
+ Aside from basic data gathering, the class supports basic analysis
+ capabilities. You can query for data between ranges. You can also tell it
+ when certain events occur and later grab data relevant to those events or
+ plot those events on a timeline.
+
+ The resource monitor works by periodically polling the state of the
+ system. By default, it polls every second. This can be adjusted depending
+ on the required granularity of the data and considerations for probe
+ overhead. It tries to probe at the interval specified. However, variations
+ should be expected. Fast and well-behaving systems should experience
+ variations in the 1ms range. Larger variations may exist if the system is
+ under heavy load or depending on how accurate socket polling is on your
+ system.
+
+ In its current implementation, data is not available until collection has
+ stopped. This may change in future iterations.
+
+ Usage
+ =====
+
+ monitor = SystemResourceMonitor()
+ monitor.start()
+
+ # Record that a single event in time just occurred.
+ foo.do_stuff()
+ monitor.record_event('foo_did_stuff')
+
+ # Record that we're about to perform a possibly long-running event.
+ with monitor.phase('long_job'):
+ foo.do_long_running_job()
+
+ # Stop recording. Currently we need to stop before data is available.
+ monitor.stop()
+
+ # Obtain the raw data for the entire probed range.
+ print('CPU Usage:')
+ for core in monitor.aggregate_cpu():
+ print(core)
+
+ # We can also request data corresponding to a specific phase.
+ for data in monitor.phase_usage('long_job'):
+ print(data.cpu_percent)
+ """
+
+ # The interprocess communication is complicated enough to warrant
+ # explanation. To work around the Python GIL, we launch a separate
+ # background process whose only job is to collect metrics. If we performed
+ # collection in the main process, the polling interval would be
+ # inconsistent if a long-running function were on the stack. Since the
+ # child process is independent of the instantiating process, data
+ # collection should be evenly spaced.
+ #
+ # As the child process collects data, it buffers it locally. When
+ # collection stops, it flushes all that data to a pipe to be read by
+ # the parent process.
+
+ def __init__(self, poll_interval=1.0):
+ """Instantiate a system resource monitor instance.
+
+ The instance is configured with a poll interval. This is the interval
+ between samples, in float seconds.
+ """
+ self.start_time = None
+ self.end_time = None
+
+ self.events = []
+ self.phases = OrderedDict()
+
+ self._active_phases = {}
+
+ self._running = False
+ self._stopped = False
+ self._process = None
+
+ if psutil is None:
+ return
+
+ # This try..except should not be needed! However, some tools (like
+ # |mach build|) attempt to import psutil before the virtualenv that
+ # builds psutil has been properly created. As a result, python/psutil
+ # may be in sys.path and its .py files may pick up the psutil C
+ # extension from the system install. If the versions don't match, we
+ # typically see failures invoking one of these functions.
+ try:
+ cpu_percent = psutil.cpu_percent(0.0, True)
+ cpu_times = psutil.cpu_times(False)
+ io = get_disk_io_counters()
+ virt = psutil.virtual_memory()
+ swap = psutil.swap_memory()
+ except Exception as e:
+ warnings.warn('psutil failed to run: %s' % e)
+ return
+
+ self._cpu_cores = len(cpu_percent)
+ self._cpu_times_type = type(cpu_times)
+ self._cpu_times_len = len(cpu_times)
+ self._io_type = type(io)
+ self._io_len = len(io)
+ self._virt_type = type(virt)
+ self._virt_len = len(virt)
+ self._swap_type = type(swap)
+ self._swap_len = len(swap)
+
+ self._pipe, child_pipe = multiprocessing.Pipe(True)
+
+ self._process = multiprocessing.Process(None, _collect,
+ args=(child_pipe, poll_interval))
+
+ def __del__(self):
+ if self._running:
+ self._pipe.send(('terminate',))
+ self._process.join()
+
+ # Methods to control monitoring.
+
+ def start(self):
+ """Start measuring system-wide CPU resource utilization.
+
+ You should only call this once per instance.
+ """
+ if not self._process:
+ return
+
+ self._running = True
+ self._process.start()
+
+ def stop(self):
+ """Stop measuring system-wide CPU resource utilization.
+
+ You should call this if and only if you have called start(). You should
+ always pair a stop() with a start().
+
+ Currently, data is not available until you call stop().
+ """
+ if not self._process:
+ self._stopped = True
+ return
+
+ assert self._running
+ assert not self._stopped
+
+ self._pipe.send(('terminate',))
+ self._running = False
+ self._stopped = True
+
+ self.measurements = []
+
+ done = False
+
+ # The child process will send each data sample over the pipe
+ # as a separate data structure. When it has finished sending
+ # samples, it sends a special "done" message to indicate it
+ # is finished.
+ while self._pipe.poll(1.0):
+ start_time, end_time, io_diff, cpu_diff, cpu_percent, virt_mem, \
+ swap_mem = self._pipe.recv()
+
+ # There should be nothing after the "done" message so
+ # terminate.
+ if start_time == 'done':
+ done = True
+ break
+
+ io = self._io_type(*io_diff)
+ virt = self._virt_type(*virt_mem)
+ swap = self._swap_type(*swap_mem)
+ cpu_times = [self._cpu_times_type(*v) for v in cpu_diff]
+
+ self.measurements.append(SystemResourceUsage(start_time, end_time,
+ cpu_times, cpu_percent, io, virt, swap))
+
+ # We establish a timeout so we don't hang forever if the child
+ # process has crashed.
+ self._process.join(10)
+ if self._process.is_alive():
+ self._process.terminate()
+ self._process.join(10)
+ else:
+ # We should have received a "done" message from the
+ # child indicating it shut down properly. This only
+ # happens if the child shuts down cleanly.
+ assert done
+
+ if len(self.measurements):
+ self.start_time = self.measurements[0].start
+ self.end_time = self.measurements[-1].end
+
+ # Methods to record events alongside the monitored data.
+
+ def record_event(self, name):
+ """Record an event as occuring now.
+
+ Events are actions that occur at a specific point in time. If you are
+ looking for an action that has a duration, see the phase API below.
+ """
+ self.events.append((time.time(), name))
+
+ @contextmanager
+ def phase(self, name):
+ """Context manager for recording an active phase."""
+ self.begin_phase(name)
+ yield
+ self.finish_phase(name)
+
+ def begin_phase(self, name):
+ """Record the start of a phase.
+
+ Phases are actions that have a duration. Multiple phases can be active
+ simultaneously. Phases can be closed in any order.
+
+ Keep in mind that if phases occur in parallel, it will become difficult
+ to isolate resource utilization specific to individual phases.
+ """
+ assert name not in self._active_phases
+
+ self._active_phases[name] = time.time()
+
+ def finish_phase(self, name):
+ """Record the end of a phase."""
+
+ assert name in self._active_phases
+
+ phase = (self._active_phases[name], time.time())
+ self.phases[name] = phase
+ del self._active_phases[name]
+
+ return phase[1] - phase[0]
+
+ # Methods to query data.
+
+ def range_usage(self, start=None, end=None):
+ """Obtain the usage data falling within the given time range.
+
+ This is a generator of SystemResourceUsage.
+
+ If no time range bounds are given, all data is returned.
+ """
+ if not self._stopped or self.start_time is None:
+ return
+
+ if start is None:
+ start = self.start_time
+
+ if end is None:
+ end = self.end_time
+
+ for entry in self.measurements:
+ if entry.start < start:
+ continue
+
+ if entry.end > end:
+ break
+
+ yield entry
+
+ def phase_usage(self, phase):
+ """Obtain usage data for a specific phase.
+
+ This is a generator of SystemResourceUsage.
+ """
+ time_start, time_end = self.phases[phase]
+
+ return self.range_usage(time_start, time_end)
+
+ def between_events_usage(self, start_event, end_event):
+ """Obtain usage data between two point events.
+
+ This is a generator of SystemResourceUsage.
+ """
+ start_time = None
+ end_time = None
+
+ for t, name in self.events:
+ if name == start_event:
+ start_time = t
+ elif name == end_event:
+ end_time = t
+
+ if start_time is None:
+ raise Exception('Could not find start event: %s' % start_event)
+
+ if end_time is None:
+ raise Exception('Could not find end event: %s' % end_event)
+
+ return self.range_usage(start_time, end_time)
+
+ def aggregate_cpu_percent(self, start=None, end=None, phase=None,
+ per_cpu=True):
+ """Obtain the aggregate CPU percent usage for a range.
+
+ Returns a list of floats representing average CPU usage percentage per
+ core if per_cpu is True (the default). If per_cpu is False, return a
+ single percentage value.
+
+ By default this will return data for the entire instrumented interval.
+ If phase is defined, data for a named phase will be returned. If start
+ and end are defined, these times will be fed into range_usage().
+ """
+ cpu = [[] for i in range(0, self._cpu_cores)]
+
+ if phase:
+ data = self.phase_usage(phase)
+ else:
+ data = self.range_usage(start, end)
+
+ for usage in data:
+ for i, v in enumerate(usage.cpu_percent):
+ cpu[i].append(v)
+
+ samples = len(cpu[0])
+
+ if not samples:
+ return None
+
+ if per_cpu:
+ return [sum(x) / samples for x in cpu]
+
+ cores = [sum(x) for x in cpu]
+
+ return sum(cores) / len(cpu) / samples
+
+ def aggregate_cpu_times(self, start=None, end=None, phase=None,
+ per_cpu=True):
+ """Obtain the aggregate CPU times for a range.
+
+ If per_cpu is True (the default), this returns a list of named tuples.
+ Each tuple is as if it were returned by psutil.cpu_times(). If per_cpu
+ is False, this returns a single named tuple of the aforementioned type.
+ """
+ empty = [0 for i in range(0, self._cpu_times_len)]
+ cpu = [list(empty) for i in range(0, self._cpu_cores)]
+
+ if phase:
+ data = self.phase_usage(phase)
+ else:
+ data = self.range_usage(start, end)
+
+ for usage in data:
+ for i, core_values in enumerate(usage.cpu_times):
+ for j, v in enumerate(core_values):
+ cpu[i][j] += v
+
+ if per_cpu:
+ return [self._cpu_times_type(*v) for v in cpu]
+
+ sums = list(empty)
+ for core in cpu:
+ for i, v in enumerate(core):
+ sums[i] += v
+
+ return self._cpu_times_type(*sums)
+
+ def aggregate_io(self, start=None, end=None, phase=None):
+ """Obtain aggregate I/O counters for a range.
+
+ Returns an iostat named tuple from psutil.
+ """
+
+ io = [0 for i in range(self._io_len)]
+
+ if phase:
+ data = self.phase_usage(phase)
+ else:
+ data = self.range_usage(start, end)
+
+ for usage in data:
+ for i, v in enumerate(usage.io):
+ io[i] += v
+
+ return self._io_type(*io)
+
+ def min_memory_available(self, start=None, end=None, phase=None):
+ """Return the minimum observed available memory number from a range.
+
+ Returns long bytes of memory available.
+
+ See psutil for notes on how this is calculated.
+ """
+ if phase:
+ data = self.phase_usage(phase)
+ else:
+ data = self.range_usage(start, end)
+
+ values = []
+
+ for usage in data:
+ values.append(usage.virt.available)
+
+ return min(values)
+
+ def max_memory_percent(self, start=None, end=None, phase=None):
+ """Returns the maximum percentage of system memory used.
+
+ Returns a float percentage in the range 0.0-100.0. A value of 100.0
+ would mean all system memory was in use at one point.
+ """
+ if phase:
+ data = self.phase_usage(phase)
+ else:
+ data = self.range_usage(start, end)
+
+ values = []
+
+ for usage in data:
+ values.append(usage.virt.percent)
+
+ return max(values)
+
+ def as_dict(self):
+ """Convert the recorded data to a dict, suitable for serialization.
+
+ The returned dict has the following keys:
+
+ version - Integer version number being rendered. Currently 2.
+ cpu_times_fields - A list of the names of the CPU times fields.
+ io_fields - A list of the names of the I/O fields.
+ virt_fields - A list of the names of the virtual memory fields.
+ swap_fields - A list of the names of the swap memory fields.
+ samples - A list of dicts containing low-level measurements.
+ events - A list of lists representing point events. The inner list
+ has 2 elements, the float wall time of the event and the string
+ event name.
+ phases - A list of dicts describing phases. Each phase looks a lot
+ like an entry from samples (see below). Some phases may not have
+ data recorded against them, so some keys may be None.
+ overall - A dict representing overall resource usage. This resembles
+ a sample entry.
+ system - Contains additional information about the system including
+ number of processors and amount of memory.
+
+ Each entry in the sample list is a dict with the following keys:
+
+ start - Float wall time this measurement began on.
+ end - Float wall time this measurement ended on.
+ io - List of numerics for I/O values.
+ virt - List of numerics for virtual memory values.
+ swap - List of numerics for swap memory values.
+ cpu_percent - List of floats representing CPU percent on each core.
+ cpu_times - List of lists. Main list is each core. Inner lists are
+ lists of floats representing CPU times on that core.
+ cpu_percent_mean - Float of mean CPU percent across all cores.
+ cpu_times_sum - List of floats representing the sum of CPU times
+ across all cores.
+ cpu_times_total - Float representing the sum of all CPU times across
+ all cores. This is useful for calculating the percent in each CPU
+ time.
+ """
+
+ o = dict(
+ version=2,
+ cpu_times_fields=list(self._cpu_times_type._fields),
+ io_fields=list(self._io_type._fields),
+ virt_fields=list(self._virt_type._fields),
+ swap_fields=list(self._swap_type._fields),
+ samples=[],
+ phases=[],
+ system={},
+ )
+
+ def populate_derived(e):
+ if e['cpu_percent_cores']:
+ e['cpu_percent_mean'] = sum(e['cpu_percent_cores']) / \
+ len(e['cpu_percent_cores'])
+ else:
+ e['cpu_percent_mean'] = None
+
+ if e['cpu_times']:
+ e['cpu_times_sum'] = [0.0] * self._cpu_times_len
+ for i in range(0, self._cpu_times_len):
+ e['cpu_times_sum'][i] = sum(core[i] for core in e['cpu_times'])
+
+ e['cpu_times_total'] = sum(e['cpu_times_sum'])
+
+ def phase_entry(name, start, end):
+ e = dict(
+ name=name,
+ start=start,
+ end=end,
+ duration=end - start,
+ cpu_percent_cores=self.aggregate_cpu_percent(phase=name),
+ cpu_times=[list(c) for c in
+ self.aggregate_cpu_times(phase=name)],
+ io=list(self.aggregate_io(phase=name)),
+ )
+ populate_derived(e)
+ return e
+
+ for m in self.measurements:
+ e = dict(
+ start=m.start,
+ end=m.end,
+ io=list(m.io),
+ virt=list(m.virt),
+ swap=list(m.swap),
+ cpu_percent_cores=list(m.cpu_percent),
+ cpu_times=list(list(cpu) for cpu in m.cpu_times)
+ )
+
+ populate_derived(e)
+ o['samples'].append(e)
+
+ if o['samples']:
+ o['start'] = o['samples'][0]['start']
+ o['end'] = o['samples'][-1]['end']
+ o['duration'] = o['end'] - o['start']
+ o['overall'] = phase_entry(None, o['start'], o['end'])
+ else:
+ o['start'] = None
+ o['end'] = None
+ o['duration'] = None
+ o['overall'] = None
+
+ o['events'] = [list(ev) for ev in self.events]
+
+ for phase, v in self.phases.items():
+ o['phases'].append(phase_entry(phase, v[0], v[1]))
+
+ if have_psutil:
+ o['system'].update(dict(
+ cpu_logical_count=psutil.cpu_count(logical=True),
+ cpu_physical_count=psutil.cpu_count(logical=False),
+ swap_total=psutil.swap_memory()[0],
+ vmem_total=psutil.virtual_memory()[0],
+ ))
+
+ return o
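+
+
+# A minimal usage sketch (assumes psutil is installed; the 'compile' phase
+# name and the output filename are illustrative only).
+if __name__ == '__main__':
+    import json
+
+    monitor = SystemResourceMonitor(poll_interval=0.5)
+    monitor.start()
+
+    with monitor.phase('compile'):
+        time.sleep(2)
+    monitor.record_event('compile_done')
+
+    # Data only becomes available once collection has stopped.
+    monitor.stop()
+
+    print('mean cpu percent: %s' % monitor.aggregate_cpu_percent(per_cpu=False))
+    with open('resource-usage.json', 'w') as fh:
+        json.dump(monitor.as_dict(), fh)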
diff --git a/testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/__init__.py b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/__init__.py
diff --git a/testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/test_resource_monitor.py b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/test_resource_monitor.py
new file mode 100644
index 000000000..b6763e20e
--- /dev/null
+++ b/testing/mozbase/mozsystemmonitor/mozsystemmonitor/test/test_resource_monitor.py
@@ -0,0 +1,180 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import multiprocessing
+import time
+import unittest
+
+try:
+ import psutil
+except ImportError:
+ psutil = None
+
+from mozsystemmonitor.resourcemonitor import (
+ SystemResourceMonitor,
+ SystemResourceUsage,
+)
+
+
+@unittest.skipIf(psutil is None, 'Resource monitor requires psutil.')
+class TestResourceMonitor(unittest.TestCase):
+
+ def test_basic(self):
+ monitor = SystemResourceMonitor(poll_interval=0.5)
+
+ monitor.start()
+ time.sleep(3)
+
+ monitor.stop()
+
+ data = list(monitor.range_usage())
+ self.assertGreater(len(data), 3)
+
+ self.assertIsInstance(data[0], SystemResourceUsage)
+
+ def test_empty(self):
+ monitor = SystemResourceMonitor(poll_interval=2.0)
+ monitor.start()
+ monitor.stop()
+
+ data = list(monitor.range_usage())
+ self.assertEqual(len(data), 0)
+
+ def test_phases(self):
+ monitor = SystemResourceMonitor(poll_interval=0.25)
+
+ monitor.start()
+ time.sleep(1)
+
+ with monitor.phase('phase1'):
+ time.sleep(1)
+
+ with monitor.phase('phase2'):
+ time.sleep(1)
+
+ monitor.stop()
+
+ self.assertEqual(len(monitor.phases), 2)
+ self.assertEqual(['phase2', 'phase1'], monitor.phases.keys())
+
+ all = list(monitor.range_usage())
+ data1 = list(monitor.phase_usage('phase1'))
+ data2 = list(monitor.phase_usage('phase2'))
+
+ self.assertGreater(len(all), len(data1))
+ self.assertGreater(len(data1), len(data2))
+
+ # This could fail if time.time() takes more than 0.25s. It really
+ # shouldn't.
+ self.assertAlmostEqual(data1[-1].end, data2[-1].end, delta=0.25)
+
+ def test_no_data(self):
+ monitor = SystemResourceMonitor()
+
+ data = list(monitor.range_usage())
+ self.assertEqual(len(data), 0)
+
+ def test_events(self):
+ monitor = SystemResourceMonitor(poll_interval=0.25)
+
+ monitor.start()
+ time.sleep(0.5)
+
+ t0 = time.time()
+ monitor.record_event('t0')
+ time.sleep(0.5)
+
+ monitor.record_event('t1')
+ time.sleep(0.5)
+ monitor.stop()
+
+ events = monitor.events
+ self.assertEqual(len(events), 2)
+
+ event = events[0]
+
+ self.assertEqual(event[1], 't0')
+ self.assertAlmostEqual(event[0], t0, delta=0.25)
+
+ data = list(monitor.between_events_usage('t0', 't1'))
+ self.assertGreater(len(data), 0)
+
+ def test_aggregate_cpu(self):
+ monitor = SystemResourceMonitor(poll_interval=0.25)
+
+ monitor.start()
+ time.sleep(1)
+ monitor.stop()
+
+ values = monitor.aggregate_cpu_percent()
+ self.assertIsInstance(values, list)
+ self.assertEqual(len(values), multiprocessing.cpu_count())
+ for v in values:
+ self.assertIsInstance(v, float)
+
+ value = monitor.aggregate_cpu_percent(per_cpu=False)
+ self.assertIsInstance(value, float)
+
+ values = monitor.aggregate_cpu_times()
+ self.assertIsInstance(values, list)
+ self.assertGreater(len(values), 0)
+ self.assertTrue(hasattr(values[0], 'user'))
+
+ t = type(values[0])
+
+ value = monitor.aggregate_cpu_times(per_cpu=False)
+ self.assertIsInstance(value, t)
+
+ def test_aggregate_io(self):
+ monitor = SystemResourceMonitor(poll_interval=0.25)
+
+ # There's really no easy way to ensure I/O occurs. For all we know
+ # reads and writes will all be serviced by the page cache.
+ monitor.start()
+ time.sleep(1.0)
+ monitor.stop()
+
+ values = monitor.aggregate_io()
+ self.assertTrue(hasattr(values, 'read_count'))
+
+ def test_memory(self):
+ monitor = SystemResourceMonitor(poll_interval=0.25)
+
+ monitor.start()
+ time.sleep(1.0)
+ monitor.stop()
+
+ v = monitor.min_memory_available()
+ self.assertIsInstance(v, long)
+
+ v = monitor.max_memory_percent()
+ self.assertIsInstance(v, float)
+
+ def test_as_dict(self):
+ monitor = SystemResourceMonitor(poll_interval=0.25)
+
+ monitor.start()
+ time.sleep(0.1)
+ monitor.begin_phase('phase1')
+ monitor.record_event('foo')
+ time.sleep(0.1)
+ monitor.begin_phase('phase2')
+ monitor.record_event('bar')
+ time.sleep(0.2)
+ monitor.finish_phase('phase1')
+ time.sleep(0.2)
+ monitor.finish_phase('phase2')
+ time.sleep(0.4)
+ monitor.stop()
+
+ d = monitor.as_dict()
+
+ self.assertEqual(d['version'], 2)
+ self.assertEqual(len(d['events']), 2)
+ self.assertEqual(len(d['phases']), 2)
+ self.assertIn('system', d)
+ self.assertIsInstance(d['system'], dict)
+ self.assertIsInstance(d['overall'], dict)
+ self.assertIn('duration', d['overall'])
+ self.assertIn('cpu_times', d['overall'])
diff --git a/testing/mozbase/mozsystemmonitor/setup.py b/testing/mozbase/mozsystemmonitor/setup.py
new file mode 100644
index 000000000..ecfa28805
--- /dev/null
+++ b/testing/mozbase/mozsystemmonitor/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+from setuptools import setup
+
+PACKAGE_VERSION = '0.3'
+
+try:
+ pwd = os.path.dirname(os.path.abspath(__file__))
+ description = open(os.path.join(pwd, 'README.rst')).read()
+except:
+ description = ''
+
+setup(
+ name='mozsystemmonitor',
+ description='Monitor system resource usage.',
+ long_description=description,
+ license='MPL 2.0',
+ keywords='mozilla',
+ author='Mozilla Automation and Tools Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ packages=['mozsystemmonitor'],
+ version=PACKAGE_VERSION,
+ install_requires=['psutil >= 3.1.1'],
+)
diff --git a/testing/mozbase/moztest/moztest/__init__.py b/testing/mozbase/moztest/moztest/__init__.py
new file mode 100644
index 000000000..5820ed2eb
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/__init__.py
@@ -0,0 +1,7 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import adapters
+
+__all__ = ['adapters']
diff --git a/testing/mozbase/moztest/moztest/adapters/__init__.py b/testing/mozbase/moztest/moztest/adapters/__init__.py
new file mode 100644
index 000000000..46bb3c6eb
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/adapters/__init__.py
@@ -0,0 +1,7 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unit
+
+__all__ = ['unit']
diff --git a/testing/mozbase/moztest/moztest/adapters/unit.py b/testing/mozbase/moztest/moztest/adapters/unit.py
new file mode 100644
index 000000000..cee0e05e9
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/adapters/unit.py
@@ -0,0 +1,225 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import sys
+import time
+import traceback
+
+try:
+ from unittest import TextTestResult
+except ImportError:
+ # bug 971243 - python 2.6 compatibility
+ from unittest import _TextTestResult as TextTestResult
+
+"""Adapter used to output structuredlog messages from unittest
+testsuites"""
+
+
+def get_test_class_name(test):
+ """
+ This function returns the full class name for a
+ :class:`unittest.TestCase` instance.
+
+ It is used as a default to define the "class_name" extra value
+ passed in structured loggers. You can override the default by
+ implementing a "get_test_class_name" method on your TestCase subclass.
+ """
+ return "%s.%s" % (test.__class__.__module__,
+ test.__class__.__name__)
+
+
+def get_test_method_name(test):
+ """
+ This function returns the name of the test method for a
+ :class:`unittest.TestCase` instance.
+
+ It is used as a default to define the "method_name" extra value
+ passed in structured loggers. You can override the default by
+ implementing a "get_test_method_name" method on your TestCase subclass.
+ """
+ return test._testMethodName
+
+
+class StructuredTestResult(TextTestResult):
+
+ def __init__(self, *args, **kwargs):
+ self.logger = kwargs.pop('logger')
+ self.test_list = kwargs.pop("test_list", [])
+ self.result_callbacks = kwargs.pop('result_callbacks', [])
+ self.passed = 0
+ self.testsRun = 0
+ TextTestResult.__init__(self, *args, **kwargs)
+
+ def call_callbacks(self, test, status):
+ debug_info = {}
+ for callback in self.result_callbacks:
+ info = callback(test, status)
+ if info is not None:
+ debug_info.update(info)
+ return debug_info
+
+ def startTestRun(self):
+ # This would be an opportunity to call the logger's suite_start action,
+ # however some users may use multiple suites, and per the structured
+ # logging protocol, this action should only be called once.
+ pass
+
+ def startTest(self, test):
+ self.testsRun += 1
+ self.logger.test_start(test.id())
+
+ def stopTest(self, test):
+ pass
+
+ def stopTestRun(self):
+ # This would be an opportunity to call the logger's suite_end action,
+ # however some users may use multiple suites, and per the structured
+ # logging protocol, this action should only be called once.
+ pass
+
+ def _extract_err_message(self, err):
+ # Format an exception message in the style of unittest's _exc_info_to_string
+ # while maintaining a division between a traceback and a message.
+ exc_ty, val, _ = err
+ exc_msg = "".join(traceback.format_exception_only(exc_ty, val))
+ if self.buffer:
+ output_msg = "\n".join([sys.stdout.getvalue(), sys.stderr.getvalue()])
+ return "".join([exc_msg, output_msg])
+ return exc_msg.rstrip()
+
+ def _extract_stacktrace(self, err, test):
+ # Format an exception stack in the style of unittest's _exc_info_to_string
+ # while maintaining a division between a traceback and a message.
+ # This is mostly borrowed from unittest.result._exc_info_to_string.
+
+ exctype, value, tb = err
+ while tb and self._is_relevant_tb_level(tb):
+ tb = tb.tb_next
+ # Header usually included by print_exception
+ lines = ["Traceback (most recent call last):\n"]
+ if exctype is test.failureException:
+ length = self._count_relevant_tb_levels(tb)
+ lines += traceback.format_tb(tb, length)
+ else:
+ lines += traceback.format_tb(tb)
+ return "".join(lines)
+
+ def _get_class_method_name(self, test):
+ if hasattr(test, 'get_test_class_name'):
+ class_name = test.get_test_class_name()
+ else:
+ class_name = get_test_class_name(test)
+
+ if hasattr(test, 'get_test_method_name'):
+ method_name = test.get_test_method_name()
+ else:
+ method_name = get_test_method_name(test)
+
+ return {
+ 'class_name': class_name,
+ 'method_name': method_name
+ }
+
+ def addError(self, test, err):
+ self.errors.append((test, self._exc_info_to_string(err, test)))
+ extra = self.call_callbacks(test, "ERROR")
+ extra.update(self._get_class_method_name(test))
+ self.logger.test_end(test.id(),
+ "ERROR",
+ message=self._extract_err_message(err),
+ expected="PASS",
+ stack=self._extract_stacktrace(err, test),
+ extra=extra)
+
+ def addFailure(self, test, err):
+ extra = self.call_callbacks(test, "FAIL")
+ extra.update(self._get_class_method_name(test))
+ self.logger.test_end(test.id(),
+ "FAIL",
+ message=self._extract_err_message(err),
+ expected="PASS",
+ stack=self._extract_stacktrace(err, test),
+ extra=extra)
+
+ def addSuccess(self, test):
+ extra = self._get_class_method_name(test)
+ self.logger.test_end(test.id(),
+ "PASS",
+ expected="PASS",
+ extra=extra)
+
+ def addExpectedFailure(self, test, err):
+ extra = self.call_callbacks(test, "FAIL")
+ extra.update(self._get_class_method_name(test))
+ self.logger.test_end(test.id(),
+ "FAIL",
+ message=self._extract_err_message(err),
+ expected="FAIL",
+ stack=self._extract_stacktrace(err, test),
+ extra=extra)
+
+ def addUnexpectedSuccess(self, test):
+ extra = self.call_callbacks(test, "PASS")
+ extra.update(self._get_class_method_name(test))
+ self.logger.test_end(test.id(),
+ "PASS",
+ expected="FAIL",
+ extra=extra)
+
+ def addSkip(self, test, reason):
+ extra = self.call_callbacks(test, "SKIP")
+ extra.update(self._get_class_method_name(test))
+ self.logger.test_end(test.id(),
+ "SKIP",
+ message=reason,
+ expected="PASS",
+ extra=extra)
+
+
+class StructuredTestRunner(unittest.TextTestRunner):
+
+ resultclass = StructuredTestResult
+
+ def __init__(self, **kwargs):
+ """TestRunner subclass designed for structured logging.
+
+ :param logger: A ``StructuredLogger`` to use for logging the test run.
+ :param test_list: An optional list of tests that will be passed along
+ with the `suite_start` message.
+
+ """
+
+ self.logger = kwargs.pop("logger")
+ self.test_list = kwargs.pop("test_list", [])
+ self.result_callbacks = kwargs.pop("result_callbacks", [])
+ unittest.TextTestRunner.__init__(self, **kwargs)
+
+ def _makeResult(self):
+ return self.resultclass(self.stream,
+ self.descriptions,
+ self.verbosity,
+ logger=self.logger,
+ test_list=self.test_list,
+ result_callbacks=self.result_callbacks)
+
+ def run(self, test):
+ """Run the given test case or test suite."""
+ result = self._makeResult()
+ result.failfast = self.failfast
+ result.buffer = self.buffer
+ startTime = time.time()
+ startTestRun = getattr(result, 'startTestRun', None)
+ if startTestRun is not None:
+ startTestRun()
+ try:
+ test(result)
+ finally:
+ stopTestRun = getattr(result, 'stopTestRun', None)
+ if stopTestRun is not None:
+ stopTestRun()
+ stopTime = time.time()
+ if hasattr(result, 'time_taken'):
+ result.time_taken = stopTime - startTime
+
+ return result
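+
+
+# A minimal usage sketch: drive a plain unittest suite through the structured
+# runner. ExampleTest is illustrative; the logger wiring follows the mozlog
+# structured logging API.
+if __name__ == '__main__':
+    from mozlog.formatters import JSONFormatter
+    from mozlog.handlers import StreamHandler
+    from mozlog.structuredlog import StructuredLogger
+
+    class ExampleTest(unittest.TestCase):
+        def test_pass(self):
+            self.assertTrue(True)
+
+    logger = StructuredLogger('moztest-example')
+    logger.add_handler(StreamHandler(sys.stdout, JSONFormatter()))
+
+    # suite_start/suite_end are left to the caller so that several suites
+    # can share a single pair of messages (see StructuredTestResult above).
+    logger.suite_start(tests=[])
+    suite = unittest.TestLoader().loadTestsFromTestCase(ExampleTest)
+    StructuredTestRunner(logger=logger).run(suite)
+    logger.suite_end()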
diff --git a/testing/mozbase/moztest/moztest/output/__init__.py b/testing/mozbase/moztest/moztest/output/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/output/__init__.py
diff --git a/testing/mozbase/moztest/moztest/output/autolog.py b/testing/mozbase/moztest/moztest/output/autolog.py
new file mode 100644
index 000000000..b6c8368bd
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/output/autolog.py
@@ -0,0 +1,73 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from mozautolog import RESTfulAutologTestGroup
+
+from base import Output, count, long_name
+
+
+class AutologOutput(Output):
+
+ def __init__(self, es_server='buildbot-es.metrics.scl3.mozilla.com:9200',
+ rest_server='http://brasstacks.mozilla.com/autologserver',
+ name='moztest',
+ harness='moztest'):
+ self.es_server = es_server
+ self.rest_server = rest_server
+
+ def serialize(self, results_collection, file_obj):
+ grps = self.make_testgroups(results_collection)
+ for g in grps:
+ file_obj.write(g.serialize())
+
+ def make_testgroups(self, results_collection):
+ testgroups = []
+ for context in results_collection.contexts:
+ coll = results_collection.subset(lambda t: t.context == context)
+ passed = coll.tests_with_result('PASS')
+ failed = coll.tests_with_result('UNEXPECTED-FAIL')
+ unexpected_passes = coll.tests_with_result('UNEXPECTED-PASS')
+ errors = coll.tests_with_result('ERROR')
+ skipped = coll.tests_with_result('SKIPPED')
+ known_fails = coll.tests_with_result('KNOWN-FAIL')
+
+ testgroup = RESTfulAutologTestGroup(
+ testgroup=context.testgroup,
+ os=context.os,
+ platform=context.arch,
+ harness=context.harness,
+ server=self.es_server,
+ restserver=self.rest_server,
+ machine=context.hostname,
+ logfile=context.logfile,
+ )
+ testgroup.add_test_suite(
+ testsuite=results_collection.suite_name,
+ elapsedtime=coll.time_taken,
+ passed=count(passed),
+ failed=count(failed) + count(errors) + count(unexpected_passes),
+ todo=count(skipped) + count(known_fails),
+ )
+ testgroup.set_primary_product(
+ tree=context.tree,
+ revision=context.revision,
+ productname=context.product,
+ buildtype=context.buildtype,
+ )
+ # need to call this again since we already used the generator
+ for f in coll.tests_with_result('UNEXPECTED-FAIL'):
+ testgroup.add_test_failure(
+ test=long_name(f),
+ text='\n'.join(f.output),
+ status=f.result,
+ )
+ testgroups.append(testgroup)
+ return testgroups
+
+ def post(self, data):
+ msg = "Must pass in a list returned by make_testgroups."
+ for d in data:
+ assert isinstance(d, RESTfulAutologTestGroup), msg
+ d.submit()
diff --git a/testing/mozbase/moztest/moztest/output/base.py b/testing/mozbase/moztest/moztest/output/base.py
new file mode 100644
index 000000000..7e39317fb
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/output/base.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from __future__ import with_statement
+from contextlib import closing
+from StringIO import StringIO
+
+try:
+ from abc import abstractmethod
+except ImportError:
+ # abc is python 2.6+
+ # from https://github.com/mozilla/mozbase/blob/master/mozdevice/mozdevice/devicemanager.py
+ def abstractmethod(method):
+ line = method.func_code.co_firstlineno
+ filename = method.func_code.co_filename
+
+ def not_implemented(*args, **kwargs):
+ raise NotImplementedError('Abstract method %s at File "%s", '
+ 'line %s should be implemented by a concrete class' %
+ (repr(method), filename, line))
+ return not_implemented
+
+
+class Output(object):
+ """ Abstract base class for outputting test results """
+
+ @abstractmethod
+ def serialize(self, results_collection, file_obj):
+ """ Writes the string representation of the results collection
+ to the given file object"""
+
+ def dump_string(self, results_collection):
+ """ Returns the string representation of the results collection """
+ with closing(StringIO()) as s:
+ self.serialize(results_collection, s)
+ return s.getvalue()
+
+
+# helper functions
+def count(iterable):
+ """ Return the count of an iterable. Useful for generators. """
+ c = 0
+ for i in iterable:
+ c += 1
+ return c
+
+
+def long_name(test):
+ if test.test_class:
+ return '%s.%s' % (test.test_class, test.name)
+ return test.name
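+
+
+# A minimal sketch of a concrete Output subclass; SummaryOutput is
+# illustrative and not part of moztest itself.
+class SummaryOutput(Output):
+    """Writes one plain-text line per test result."""
+
+    def serialize(self, results_collection, file_obj):
+        for result in results_collection:
+            file_obj.write('%s %s\n' % (result.result, long_name(result)))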
diff --git a/testing/mozbase/moztest/moztest/output/xunit.py b/testing/mozbase/moztest/moztest/output/xunit.py
new file mode 100644
index 000000000..0d0ef7bb4
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/output/xunit.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import xml.dom.minidom as dom
+
+from base import Output, count
+from moztest.results import TestResult
+
+
+class XUnitOutput(Output):
+ """ Class for writing xUnit formatted test results in an XML file """
+
+ def serialize(self, results_collection, file_obj):
+ """ Writes the xUnit formatted results to the given file object """
+
+ def _extract_xml(test_result, text='', result='Pass'):
+ if not isinstance(text, basestring):
+ text = '\n'.join(text)
+
+ cls_name = test_result.test_class
+
+ # if the test class is not already created, create it
+ if cls_name not in classes:
+ cls = doc.createElement('class')
+ cls.setAttribute('name', cls_name)
+ assembly.appendChild(cls)
+ classes[cls_name] = cls
+
+ t = doc.createElement('test')
+ t.setAttribute('name', test_result.name)
+ t.setAttribute('result', result)
+
+ if result == 'Fail':
+ f = doc.createElement('failure')
+ st = doc.createElement('stack-trace')
+ st.appendChild(doc.createTextNode(text))
+
+ f.appendChild(st)
+ t.appendChild(f)
+
+ elif result == 'Skip':
+ r = doc.createElement('reason')
+ msg = doc.createElement('message')
+ msg.appendChild(doc.createTextNode(text))
+
+ r.appendChild(msg)
+ t.appendChild(r)
+
+ cls = classes[cls_name]
+ cls.appendChild(t)
+
+ doc = dom.Document()
+
+ failed = sum([count(results_collection.tests_with_result(t))
+ for t in TestResult.FAIL_RESULTS])
+ passed = count(results_collection.tests_with_result('PASS'))
+ skipped = count(results_collection.tests_with_result('SKIPPED'))
+
+ assembly = doc.createElement('assembly')
+ assembly.setAttribute('name', results_collection.suite_name)
+ assembly.setAttribute('time', str(results_collection.time_taken))
+ assembly.setAttribute('total', str(len(results_collection)))
+ assembly.setAttribute('passed', str(passed))
+ assembly.setAttribute('failed', str(failed))
+ assembly.setAttribute('skipped', str(skipped))
+
+ classes = {} # str -> xml class element
+
+ for tr in results_collection.tests_with_result('ERROR'):
+ _extract_xml(tr, text=tr.output, result='Fail')
+
+ for tr in results_collection.tests_with_result('UNEXPECTED-FAIL'):
+ _extract_xml(tr, text=tr.output, result='Fail')
+
+ for tr in results_collection.tests_with_result('UNEXPECTED-PASS'):
+ _extract_xml(tr, text='UNEXPECTED-PASS', result='Fail')
+
+ for tr in results_collection.tests_with_result('SKIPPED'):
+ _extract_xml(tr, text=tr.output, result='Skip')
+
+ for tr in results_collection.tests_with_result('KNOWN-FAIL'):
+ _extract_xml(tr, text=tr.output, result='Pass')
+
+ for tr in results_collection.tests_with_result('PASS'):
+ _extract_xml(tr, result='Pass')
+
+ for cls in classes.itervalues():
+ assembly.appendChild(cls)
+
+ doc.appendChild(assembly)
+ file_obj.write(doc.toxml(encoding='utf-8'))
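+
+
+# A minimal usage sketch; the collection contents and the output filename
+# are illustrative only.
+if __name__ == '__main__':
+    from moztest.results import TestResultCollection
+
+    collection = TestResultCollection('example-suite', time_taken=0.1)
+    result = TestResult('test_example', test_class='example.ExampleTest',
+                        time_start=0)
+    result.finish('PASS', time_end=0)
+    collection.append(result)
+
+    with open('results.xml', 'w') as f:
+        XUnitOutput().serialize(collection, f)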
diff --git a/testing/mozbase/moztest/moztest/results.py b/testing/mozbase/moztest/moztest/results.py
new file mode 100644
index 000000000..435665c67
--- /dev/null
+++ b/testing/mozbase/moztest/moztest/results.py
@@ -0,0 +1,323 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import time
+import os
+import mozinfo
+
+
+class TestContext(object):
+ """ Stores context data about the test """
+
+ attrs = ['hostname', 'arch', 'env', 'os', 'os_version', 'tree', 'revision',
+ 'product', 'logfile', 'testgroup', 'harness', 'buildtype']
+
+ def __init__(self, hostname='localhost', tree='', revision='', product='',
+ logfile=None, arch='', operating_system='', testgroup='',
+ harness='moztest', buildtype=''):
+ self.hostname = hostname
+ self.arch = arch or mozinfo.processor
+ self.env = os.environ.copy()
+ self.os = operating_system or mozinfo.os
+ self.os_version = mozinfo.version
+ self.tree = tree
+ self.revision = revision
+ self.product = product
+ self.logfile = logfile
+ self.testgroup = testgroup
+ self.harness = harness
+ self.buildtype = buildtype
+
+ def __str__(self):
+ return '%s (%s, %s)' % (self.hostname, self.os, self.arch)
+
+ def __repr__(self):
+ return '<%s>' % self.__str__()
+
+ def __eq__(self, other):
+ if not isinstance(other, TestContext):
+ return False
+ diffs = [a for a in self.attrs if getattr(self, a) != getattr(other, a)]
+ return len(diffs) == 0
+
+ def __hash__(self):
+ def get(attr):
+ value = getattr(self, attr)
+ if isinstance(value, dict):
+ value = frozenset(value.items())
+ return value
+ return hash(frozenset([get(a) for a in self.attrs]))
+
+
+class TestResult(object):
+ """ Stores test result data """
+
+ FAIL_RESULTS = [
+ 'UNEXPECTED-PASS',
+ 'UNEXPECTED-FAIL',
+ 'ERROR',
+ ]
+ COMPUTED_RESULTS = FAIL_RESULTS + [
+ 'PASS',
+ 'KNOWN-FAIL',
+ 'SKIPPED',
+ ]
+ POSSIBLE_RESULTS = [
+ 'PASS',
+ 'FAIL',
+ 'SKIP',
+ 'ERROR',
+ ]
+
+ def __init__(self, name, test_class='', time_start=None, context=None,
+ result_expected='PASS'):
+ """ Create a TestResult instance.
+ name = name of the test that is running
+ test_class = the class that the test belongs to
+ time_start = timestamp (seconds since UNIX epoch) of when the test started
+ running; if not provided, defaults to the current time
+ ! Provide 0 if you only have the duration
+ context = TestContext instance; can be None
+ result_expected = string representing the expected outcome of the test"""
+
+ msg = "Result '%s' not in possible results: %s" %\
+ (result_expected, ', '.join(self.POSSIBLE_RESULTS))
+ assert isinstance(name, basestring), "name has to be a string"
+ assert result_expected in self.POSSIBLE_RESULTS, msg
+
+ self.name = name
+ self.test_class = test_class
+ self.context = context
+ self.time_start = time_start if time_start is not None else time.time()
+ self.time_end = None
+ self._result_expected = result_expected
+ self._result_actual = None
+ self.result = None
+ self.filename = None
+ self.description = None
+ self.output = []
+ self.reason = None
+
+ @property
+ def test_name(self):
+ return '%s.py %s.%s' % (self.test_class.split('.')[0],
+ self.test_class,
+ self.name)
+
+ def __str__(self):
+ return '%s | %s (%s) | %s' % (self.result or 'PENDING',
+ self.name, self.test_class, self.reason)
+
+ def __repr__(self):
+ return '<%s>' % self.__str__()
+
+ def calculate_result(self, expected, actual):
+ if actual == 'ERROR':
+ return 'ERROR'
+ if actual == 'SKIP':
+ return 'SKIPPED'
+
+ if expected == 'PASS':
+ if actual == 'PASS':
+ return 'PASS'
+ if actual == 'FAIL':
+ return 'UNEXPECTED-FAIL'
+
+ if expected == 'FAIL':
+ if actual == 'PASS':
+ return 'UNEXPECTED-PASS'
+ if actual == 'FAIL':
+ return 'KNOWN-FAIL'
+
+ # if actual is skip or error, we return at the beginning, so if we get
+ # here it is definitely some kind of error
+ return 'ERROR'
+
+ def infer_results(self, computed_result):
+ assert computed_result in self.COMPUTED_RESULTS
+ if computed_result == 'UNEXPECTED-PASS':
+ expected = 'FAIL'
+ actual = 'PASS'
+ elif computed_result == 'UNEXPECTED-FAIL':
+ expected = 'PASS'
+ actual = 'FAIL'
+ elif computed_result == 'KNOWN-FAIL':
+ expected = actual = 'FAIL'
+ elif computed_result == 'SKIPPED':
+ expected = actual = 'SKIP'
+ else:
+ return
+ self._result_expected = expected
+ self._result_actual = actual
+
+ def finish(self, result, time_end=None, output=None, reason=None):
+ """ Marks the test as finished, storing its end time and status
+ ! Provide the duration as time_end if you only have that. """
+
+ if result in self.POSSIBLE_RESULTS:
+ self._result_actual = result
+ self.result = self.calculate_result(self._result_expected,
+ self._result_actual)
+ elif result in self.COMPUTED_RESULTS:
+ self.infer_results(result)
+ self.result = result
+ else:
+ valid = self.POSSIBLE_RESULTS + self.COMPUTED_RESULTS
+ msg = "Result '%s' not valid. Need one of: %s" %\
+ (result, ', '.join(valid))
+ raise ValueError(msg)
+
+ # use lists instead of multiline strings
+ if isinstance(output, basestring):
+ output = output.splitlines()
+
+ self.time_end = time_end if time_end is not None else time.time()
+ self.output = output or self.output
+ self.reason = reason
+
+ @property
+ def finished(self):
+ """ Boolean saying if the test is finished or not """
+ return self.result is not None
+
+ @property
+ def duration(self):
+ """ Returns the time it took for the test to finish. If the test is
+ not finished, returns the elapsed time so far """
+ if self.result is not None:
+ return self.time_end - self.time_start
+ else:
+ # returns the elapsed time
+ return time.time() - self.time_start
+
+
+class TestResultCollection(list):
+ """ Container class that stores test results """
+
+ resultClass = TestResult
+
+ def __init__(self, suite_name, time_taken=0, resultClass=None):
+ list.__init__(self)
+ self.suite_name = suite_name
+ self.time_taken = time_taken
+ if resultClass is not None:
+ self.resultClass = resultClass
+
+ def __str__(self):
+ return "%s (%.2fs)\n%s" % (self.suite_name, self.time_taken,
+ list.__str__(self))
+
+ def subset(self, predicate):
+ tests = self.filter(predicate)
+ duration = 0
+ sub = TestResultCollection(self.suite_name)
+ for t in tests:
+ sub.append(t)
+ duration += t.duration
+ sub.time_taken = duration
+ return sub
+
+ @property
+ def contexts(self):
+ """ List of unique contexts for the test results contained """
+ cs = [tr.context for tr in self]
+ return list(set(cs))
+
+ def filter(self, predicate):
+ """ Returns a generator of TestResults that satisfy a given predicate """
+ return (tr for tr in self if predicate(tr))
+
+ def tests_with_result(self, result):
+ """ Returns a generator of TestResults with the given result """
+ msg = "Result '%s' not in possible results: %s" %\
+ (result, ', '.join(self.resultClass.COMPUTED_RESULTS))
+ assert result in self.resultClass.COMPUTED_RESULTS, msg
+ return self.filter(lambda t: t.result == result)
+
+ @property
+ def tests(self):
+ """ Generator of all tests in the collection """
+ return (t for t in self)
+
+ def add_result(self, test, result_expected='PASS',
+ result_actual='PASS', output='', context=None):
+ def get_class(test):
+ return test.__class__.__module__ + '.' + test.__class__.__name__
+
+ t = self.resultClass(name=str(test).split()[0], test_class=get_class(test),
+ time_start=0, result_expected=result_expected,
+ context=context)
+ t.finish(result_actual, time_end=0, reason=relevant_line(output),
+ output=output)
+ self.append(t)
+
+ @property
+ def num_failures(self):
+ fails = 0
+ for t in self:
+ if t.result in self.resultClass.FAIL_RESULTS:
+ fails += 1
+ return fails
+
+ def add_unittest_result(self, result, context=None):
+ """ Adds the python unittest result provided to the collection"""
+ if hasattr(result, 'time_taken'):
+ self.time_taken += result.time_taken
+
+ for test, output in result.errors:
+ self.add_result(test, result_actual='ERROR', output=output)
+
+ for test, output in result.failures:
+ self.add_result(test, result_actual='FAIL',
+ output=output)
+
+ if hasattr(result, 'unexpectedSuccesses'):
+ for test in result.unexpectedSuccesses:
+ self.add_result(test, result_expected='FAIL',
+ result_actual='PASS')
+
+ if hasattr(result, 'skipped'):
+ for test, output in result.skipped:
+ self.add_result(test, result_expected='SKIP',
+ result_actual='SKIP', output=output)
+
+ if hasattr(result, 'expectedFailures'):
+ for test, output in result.expectedFailures:
+ self.add_result(test, result_expected='FAIL',
+ result_actual='FAIL', output=output)
+
+ # unittest does not store these by default
+ if hasattr(result, 'tests_passed'):
+ for test in result.tests_passed:
+ self.add_result(test)
+
+ @classmethod
+ def from_unittest_results(cls, context, *results):
+ """ Creates a TestResultCollection containing the given python
+ unittest results """
+
+ if not results:
+ return cls('from unittest')
+
+ # all the TestResult instances share the same context
+ context = context or TestContext()
+
+ collection = cls('from %s' % results[0].__class__.__name__)
+
+ for result in results:
+ collection.add_unittest_result(result, context)
+
+ return collection
+
+
+# used to get exceptions/errors from tracebacks
+def relevant_line(s):
+ KEYWORDS = ('Error:', 'Exception:', 'error:', 'exception:')
+ lines = s.splitlines()
+ for line in lines:
+ for keyword in KEYWORDS:
+ if keyword in line:
+ return line
+ return 'N/A'
diff --git a/testing/mozbase/moztest/setup.py b/testing/mozbase/moztest/setup.py
new file mode 100644
index 000000000..9c7c11789
--- /dev/null
+++ b/testing/mozbase/moztest/setup.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup, find_packages
+
+PACKAGE_VERSION = '0.8'
+
+# dependencies
+deps = ['mozinfo']
+
+setup(name='moztest',
+ version=PACKAGE_VERSION,
+ description="Package for storing and outputting Mozilla test results",
+ long_description="see http://mozbase.readthedocs.org/",
+ classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ keywords='mozilla',
+ author='Mozilla Automation and Tools team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=find_packages(),
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps,
+ )
diff --git a/testing/mozbase/moztest/tests/manifest.ini b/testing/mozbase/moztest/tests/manifest.ini
new file mode 100644
index 000000000..528fdea7b
--- /dev/null
+++ b/testing/mozbase/moztest/tests/manifest.ini
@@ -0,0 +1 @@
+[test.py]
diff --git a/testing/mozbase/moztest/tests/test.py b/testing/mozbase/moztest/tests/test.py
new file mode 100644
index 000000000..93096ec2a
--- /dev/null
+++ b/testing/mozbase/moztest/tests/test.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import math
+import time
+import unittest
+
+from moztest.results import TestContext, TestResult, TestResultCollection
+
+
+class Result(unittest.TestCase):
+
+ def test_results(self):
+ self.assertRaises(AssertionError,
+ lambda: TestResult('test', result_expected='hello'))
+ t = TestResult('test')
+ self.assertRaises(ValueError, lambda: t.finish(result='good bye'))
+
+ def test_time(self):
+ now = time.time()
+ t = TestResult('test')
+ time.sleep(1)
+ t.finish('PASS')
+ duration = time.time() - now
+ self.assertTrue(math.fabs(duration - t.duration) < 1)
+
+ def test_custom_time(self):
+ t = TestResult('test', time_start=0)
+ t.finish(result='PASS', time_end=1000)
+ self.assertEqual(t.duration, 1000)
+
+
+class Collection(unittest.TestCase):
+
+ def setUp(self):
+ c1 = TestContext('host1')
+ c2 = TestContext('host2')
+ c3 = TestContext('host2')
+ c3.os = 'B2G'
+ c4 = TestContext('host1')
+
+ t1 = TestResult('t1', context=c1)
+ t2 = TestResult('t2', context=c2)
+ t3 = TestResult('t3', context=c3)
+ t4 = TestResult('t4', context=c4)
+
+ self.collection = TestResultCollection('tests')
+ self.collection.extend([t1, t2, t3, t4])
+
+ def test_unique_contexts(self):
+ self.assertEqual(len(self.collection.contexts), 3)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozversion/mozversion/__init__.py b/testing/mozbase/mozversion/mozversion/__init__.py
new file mode 100644
index 000000000..7894bcb9c
--- /dev/null
+++ b/testing/mozbase/mozversion/mozversion/__init__.py
@@ -0,0 +1,7 @@
+# flake8: noqa
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .errors import *
+from .mozversion import cli, get_version
diff --git a/testing/mozbase/mozversion/mozversion/errors.py b/testing/mozbase/mozversion/mozversion/errors.py
new file mode 100644
index 000000000..756e772d6
--- /dev/null
+++ b/testing/mozbase/mozversion/mozversion/errors.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class VersionError(Exception):
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+class AppNotFoundError(VersionError):
+ """Exception for the application not found"""
+
+ def __init__(self, message):
+ VersionError.__init__(self, message)
+
+
+class LocalAppNotFoundError(AppNotFoundError):
+ """Exception for local application not found"""
+
+ def __init__(self, path):
+ AppNotFoundError.__init__(self, 'Application not found at: %s' % path)
+
+
+class RemoteAppNotFoundError(AppNotFoundError):
+ """Exception for remote application not found"""
+
+ def __init__(self, message):
+ AppNotFoundError.__init__(self, message)
diff --git a/testing/mozbase/mozversion/mozversion/mozversion.py b/testing/mozbase/mozversion/mozversion/mozversion.py
new file mode 100644
index 000000000..5dfcd306a
--- /dev/null
+++ b/testing/mozbase/mozversion/mozversion/mozversion.py
@@ -0,0 +1,340 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import ConfigParser
+from StringIO import StringIO
+import os
+import re
+import sys
+import tempfile
+import xml.dom.minidom
+import zipfile
+
+import mozfile
+import mozlog
+
+import errors
+
+
+INI_DATA_MAPPING = (('application', 'App'), ('platform', 'Build'))
+
+
+class Version(object):
+
+ def __init__(self):
+ self._info = {}
+ self._logger = mozlog.get_default_logger(component='mozversion')
+ if not self._logger:
+ self._logger = mozlog.unstructured.getLogger('mozversion')
+
+ def get_gecko_info(self, path):
+ for type, section in INI_DATA_MAPPING:
+ config_file = os.path.join(path, "%s.ini" % type)
+ if os.path.exists(config_file):
+ self._parse_ini_file(open(config_file), type, section)
+ else:
+ self._logger.warning('Unable to find %s' % config_file)
+
+ def _parse_ini_file(self, fp, type, section):
+ config = ConfigParser.RawConfigParser()
+ config.readfp(fp)
+ name_map = {'codename': 'display_name',
+ 'milestone': 'version',
+ 'sourcerepository': 'repository',
+ 'sourcestamp': 'changeset'}
+ for key, value in config.items(section):
+ name = name_map.get(key, key).lower()
+ self._info['%s_%s' % (type, name)] = config.has_option(
+ section, key) and config.get(section, key) or None
+
+ if not self._info.get('application_display_name'):
+ self._info['application_display_name'] = \
+ self._info.get('application_name')
+
+
+class LocalFennecVersion(Version):
+
+ def __init__(self, path, **kwargs):
+ Version.__init__(self, **kwargs)
+ self.get_gecko_info(path)
+
+ def get_gecko_info(self, path):
+ archive = zipfile.ZipFile(path, 'r')
+ archive_list = archive.namelist()
+ for type, section in INI_DATA_MAPPING:
+ filename = "%s.ini" % type
+ if filename in archive_list:
+ self._parse_ini_file(archive.open(filename), type,
+ section)
+ else:
+ self._logger.warning('Unable to find %s' % filename)
+
+ if "package-name.txt" in archive_list:
+ self._info["package_name"] = \
+ archive.open("package-name.txt").readlines()[0].strip()
+
+
+class LocalVersion(Version):
+
+ def __init__(self, binary, **kwargs):
+ Version.__init__(self, **kwargs)
+
+ if binary:
+ # on Windows, the binary may be specified with or without the
+ # .exe extension
+ if (not os.path.exists(binary) and
+ not os.path.exists(binary + '.exe')):
+ raise IOError('Binary path does not exist: %s' % binary)
+ path = os.path.dirname(os.path.realpath(binary))
+ else:
+ path = os.getcwd()
+
+ if not self.check_location(path):
+ if sys.platform == 'darwin':
+ resources_path = os.path.join(os.path.dirname(path),
+ 'Resources')
+ if self.check_location(resources_path):
+ path = resources_path
+ else:
+ raise errors.LocalAppNotFoundError(path)
+ else:
+ raise errors.LocalAppNotFoundError(path)
+
+ self.get_gecko_info(path)
+
+ def check_location(self, path):
+ return (os.path.exists(os.path.join(path, 'application.ini'))
+ and os.path.exists(os.path.join(path, 'platform.ini')))
+
+
+class B2GVersion(Version):
+
+ def __init__(self, sources=None, **kwargs):
+ Version.__init__(self, **kwargs)
+
+ sources = sources or \
+ os.path.exists(os.path.join(os.getcwd(), 'sources.xml')) and \
+ os.path.join(os.getcwd(), 'sources.xml')
+
+ if sources and os.path.exists(sources):
+ sources_xml = xml.dom.minidom.parse(sources)
+ for element in sources_xml.getElementsByTagName('project'):
+ path = element.getAttribute('path')
+ changeset = element.getAttribute('revision')
+ if path in ['gaia', 'gecko', 'build']:
+ if path == 'gaia' and self._info.get('gaia_changeset'):
+ break
+ self._info['_'.join([path, 'changeset'])] = changeset
+
+ def get_gaia_info(self, app_zip):
+ tempdir = tempfile.mkdtemp()
+ try:
+ gaia_commit = os.path.join(tempdir, 'gaia_commit.txt')
+ try:
+ zip_file = zipfile.ZipFile(app_zip.name)
+ with open(gaia_commit, 'w') as f:
+ f.write(zip_file.read('resources/gaia_commit.txt'))
+ except zipfile.BadZipfile:
+ self._logger.info('Unable to unzip application.zip, falling '
+ 'back to system unzip')
+ from subprocess import call
+ call(['unzip', '-j', app_zip.name, 'resources/gaia_commit.txt',
+ '-d', tempdir])
+
+ with open(gaia_commit) as f:
+ changeset, date = f.read().splitlines()
+ self._info['gaia_changeset'] = re.match(
+ '^\w{40}$', changeset) and changeset or None
+ self._info['gaia_date'] = date
+ except KeyError:
+ self._logger.warning(
+ 'Unable to find resources/gaia_commit.txt in '
+ 'application.zip')
+ finally:
+ mozfile.remove(tempdir)
+
+
+class LocalB2GVersion(B2GVersion):
+
+ def __init__(self, binary, sources=None, **kwargs):
+ B2GVersion.__init__(self, sources, **kwargs)
+
+ if binary:
+ if not os.path.exists(binary):
+ raise IOError('Binary path does not exist: %s' % binary)
+ path = os.path.dirname(binary)
+ else:
+ if os.path.exists(os.path.join(os.getcwd(), 'application.ini')):
+ path = os.getcwd()
+
+ self.get_gecko_info(path)
+
+ zip_path = os.path.join(
+ path, 'gaia', 'profile', 'webapps',
+ 'settings.gaiamobile.org', 'application.zip')
+ if os.path.exists(zip_path):
+ with open(zip_path, 'rb') as zip_file:
+ self.get_gaia_info(zip_file)
+ else:
+ self._logger.warning('Error pulling gaia file')
+
+
+class RemoteB2GVersion(B2GVersion):
+
+ def __init__(self, sources=None, dm_type='adb', host=None,
+ device_serial=None, adb_host=None, adb_port=None,
+ **kwargs):
+ B2GVersion.__init__(self, sources, **kwargs)
+
+ try:
+ import mozdevice
+ except ImportError:
+ self._logger.critical("mozdevice is required to get the version"
+ " of a remote device")
+ raise
+
+ if dm_type == 'adb':
+ dm = mozdevice.DeviceManagerADB(deviceSerial=device_serial,
+ serverHost=adb_host,
+ serverPort=adb_port)
+ elif dm_type == 'sut':
+ if not host:
+ raise errors.RemoteAppNotFoundError(
+ 'A host for SUT must be supplied.')
+ dm = mozdevice.DeviceManagerSUT(host=host)
+ else:
+ raise errors.RemoteAppNotFoundError(
+ 'Unknown device manager type: %s' % dm_type)
+
+ if not sources:
+ path = 'system/sources.xml'
+ if dm.fileExists(path):
+ sources = StringIO(dm.pullFile(path))
+ else:
+ self._logger.info('Unable to find %s' % path)
+
+ tempdir = tempfile.mkdtemp()
+ for ini in ('application', 'platform'):
+ with open(os.path.join(tempdir, '%s.ini' % ini), 'w') as f:
+ f.write(dm.pullFile('/system/b2g/%s.ini' % ini))
+ f.flush()
+ self.get_gecko_info(tempdir)
+ mozfile.remove(tempdir)
+
+ for path in ['/system/b2g', '/data/local']:
+ path += '/webapps/settings.gaiamobile.org/application.zip'
+ if dm.fileExists(path):
+ with tempfile.NamedTemporaryFile() as f:
+ dm.getFile(path, f.name)
+ self.get_gaia_info(f)
+ break
+ else:
+ self._logger.warning('Error pulling gaia file')
+
+ build_props = dm.pullFile('/system/build.prop')
+ desired_props = {
+ 'ro.build.version.incremental': 'device_firmware_version_incremental',
+ 'ro.build.version.release': 'device_firmware_version_release',
+ 'ro.build.date.utc': 'device_firmware_date',
+ 'ro.product.device': 'device_id'}
+ for line in build_props.split('\n'):
+ if not line.strip().startswith('#') and '=' in line:
+ key, value = [s.strip() for s in line.split('=', 1)]
+ if key in desired_props.keys():
+ self._info[desired_props[key]] = value
+
+ if self._info.get('device_id', '').lower() == 'flame':
+ for prop in ['ro.boot.bootloader', 't2m.sw.version']:
+ value = dm.shellCheckOutput(['getprop', prop])
+ if value:
+ self._info['device_firmware_version_base'] = value
+ break
+
+
+def get_version(binary=None, sources=None, dm_type=None, host=None,
+ device_serial=None, adb_host=None, adb_port=None):
+ """
+ Returns the application version information as a dict. You can specify
+ a path to the application binary or to an Android APK file (to get
+ version information for Firefox for Android). If this is omitted, the
+ current directory is checked for the existence of an application.ini
+ file. If none is found and no binary path was specified, the target
+ application is assumed to be a remote Firefox OS instance.
+
+ :param binary: Path to the binary for the application or Android APK file
+ :param sources: Path to the sources.xml file (Firefox OS)
+ :param dm_type: Device manager type. Must be 'adb' or 'sut' (Firefox OS)
+ :param host: Host address of remote Firefox OS instance (SUT)
+ :param device_serial: Serial identifier of Firefox OS device (ADB)
+ :param adb_host: Host address of ADB server
+ :param adb_port: Port of ADB server
+ """
+ try:
+ if binary and zipfile.is_zipfile(binary) and 'AndroidManifest.xml' in \
+ zipfile.ZipFile(binary, 'r').namelist():
+ version = LocalFennecVersion(binary)
+ else:
+ version = LocalVersion(binary)
+ if version._info.get('application_name') == 'B2G':
+ version = LocalB2GVersion(binary, sources=sources)
+ except errors.LocalAppNotFoundError:
+ if binary:
+ # we had a binary argument, do not search for remote B2G
+ raise
+ version = RemoteB2GVersion(sources=sources,
+ dm_type=dm_type,
+ host=host,
+ adb_host=adb_host,
+ adb_port=adb_port,
+ device_serial=device_serial)
+
+ for (key, value) in sorted(version._info.items()):
+ if value:
+ version._logger.info('%s: %s' % (key, value))
+
+ return version._info
+
+
+def cli(args=sys.argv[1:]):
+ parser = argparse.ArgumentParser(
+ description='Display version information for Mozilla applications')
+ parser.add_argument(
+ '--binary',
+ help='path to application binary or apk')
+ fxos = parser.add_argument_group('Firefox OS')
+ fxos.add_argument(
+ '--sources',
+ help='path to sources.xml')
+ fxos.add_argument(
+ '--device',
+ help='serial identifier of device to target')
+ fxos.add_argument(
+ '--adb-host',
+ help='host running adb')
+ fxos.add_argument(
+ '--adb-port',
+ help='port running adb')
+ mozlog.commandline.add_logging_group(
+ parser,
+ include_formatters=mozlog.commandline.TEXT_FORMATTERS
+ )
+
+ args = parser.parse_args(args)
+ dm_type = os.environ.get('DM_TRANS', 'adb')
+ host = os.environ.get('TEST_DEVICE')
+
+ mozlog.commandline.setup_logging(
+ 'mozversion', args, {'mach': sys.stdout})
+
+ get_version(binary=args.binary,
+ sources=args.sources,
+ dm_type=dm_type,
+ host=host,
+ device_serial=args.device,
+ adb_host=args.adb_host,
+ adb_port=args.adb_port)
+
+if __name__ == '__main__':
+ cli()
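For reference, a minimal usage sketch of the get_version API added above (not part of the patch). It assumes mozversion is installed and uses /path/to/firefox as a placeholder for a local desktop binary; the same information is exposed by the mozversion console script defined in setup.py below.

    from mozversion import get_version

    # Returns a dict of fields such as 'application_name',
    # 'application_version', 'application_buildid' and 'platform_buildid'.
    info = get_version(binary='/path/to/firefox')
    print(info.get('application_version'))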
diff --git a/testing/mozbase/mozversion/setup.py b/testing/mozbase/mozversion/setup.py
new file mode 100644
index 000000000..09b027925
--- /dev/null
+++ b/testing/mozbase/mozversion/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+PACKAGE_VERSION = '1.4'
+
+
+setup(name='mozversion',
+ version=PACKAGE_VERSION,
+ description='Library to get version information for applications',
+ long_description='See http://mozbase.readthedocs.org',
+ classifiers=[],
+ keywords='mozilla',
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
+ license='MPL',
+ packages=['mozversion'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=['mozfile >= 1.0', 'mozlog >= 3.0'],
+ extras_require={'device': ['mozdevice >= 0.44']},
+ entry_points="""
+ # -*- Entry points: -*-
+ [console_scripts]
+ mozversion = mozversion:cli
+ """)
diff --git a/testing/mozbase/mozversion/tests/manifest.ini b/testing/mozbase/mozversion/tests/manifest.ini
new file mode 100644
index 000000000..3c034ea78
--- /dev/null
+++ b/testing/mozbase/mozversion/tests/manifest.ini
@@ -0,0 +1,4 @@
+[test_binary.py]
+[test_sources.py]
+[test_b2g.py]
+[test_apk.py]
\ No newline at end of file
diff --git a/testing/mozbase/mozversion/tests/test_apk.py b/testing/mozbase/mozversion/tests/test_apk.py
new file mode 100644
index 000000000..37fcb0bcf
--- /dev/null
+++ b/testing/mozbase/mozversion/tests/test_apk.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozfile
+import unittest
+import zipfile
+from mozversion import get_version
+
+
+class ApkTest(unittest.TestCase):
+ """test getting version information from an android .apk"""
+
+ application_changeset = 'a' * 40
+ platform_changeset = 'b' * 40
+
+ def create_apk_zipfiles(self, zfile):
+ zfile.writestr('application.ini',
+ """[App]\nSourceStamp=%s\n""" % self.application_changeset)
+ zfile.writestr('platform.ini',
+ """[Build]\nSourceStamp=%s\n""" % self.platform_changeset)
+ zfile.writestr('AndroidManifest.xml', '')
+
+ def test_basic(self):
+ with mozfile.NamedTemporaryFile() as f:
+ with zipfile.ZipFile(f.name, 'w') as z:
+ self.create_apk_zipfiles(z)
+ v = get_version(f.name)
+ self.assertEqual(v.get('application_changeset'), self.application_changeset)
+ self.assertEqual(v.get('platform_changeset'), self.platform_changeset)
+
+ def test_with_package_name(self):
+ with mozfile.NamedTemporaryFile() as f:
+ with zipfile.ZipFile(f.name, 'w') as z:
+ self.create_apk_zipfiles(z)
+ z.writestr('package-name.txt', "org.mozilla.fennec")
+ v = get_version(f.name)
+ self.assertEqual(v.get('package_name'), "org.mozilla.fennec")
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozversion/tests/test_b2g.py b/testing/mozbase/mozversion/tests/test_b2g.py
new file mode 100644
index 000000000..09e0eb09e
--- /dev/null
+++ b/testing/mozbase/mozversion/tests/test_b2g.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import tempfile
+import unittest
+import zipfile
+
+import mozfile
+from mozversion import get_version, errors
+
+
+class SourcesTest(unittest.TestCase):
+ """test getting version information from a sources xml"""
+
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+
+ self.binary = os.path.join(self.tempdir, 'binary')
+ with open(self.binary, 'w') as f:
+ f.write('foobar')
+
+ with open(os.path.join(self.tempdir, 'application.ini'), 'w') as f:
+ f.writelines("""[App]\nName = B2G\n""")
+
+ with open(os.path.join(self.tempdir, 'platform.ini'), 'w') as f:
+ f.write('[Build]\nBuildID = PlatformBuildID\n')
+
+ def tearDown(self):
+ mozfile.remove(self.tempdir)
+
+ def _create_zip(self, revision=None, date=None):
+ zip_path = os.path.join(
+ self.tempdir, 'gaia', 'profile', 'webapps',
+ 'settings.gaiamobile.org', 'application.zip')
+ os.makedirs(os.path.dirname(zip_path))
+ app_zip = zipfile.ZipFile(zip_path, 'w')
+ if revision or date:
+ app_zip.writestr('resources/gaia_commit.txt',
+ '%s\n%s' % (revision, date))
+ app_zip.close()
+
+ def test_gaia_commit(self):
+ revision, date = ('a' * 40, 'date')
+ self._create_zip(revision, date)
+ v = get_version(self.binary)
+ self.assertEqual(v.get('gaia_changeset'), revision)
+ self.assertEqual(v.get('gaia_date'), date)
+
+ def test_invalid_gaia_commit(self):
+ revision, date = ('a' * 41, 'date')
+ self._create_zip(revision, date)
+ v = get_version(self.binary)
+ self.assertIsNone(v.get('gaia_changeset'))
+ self.assertEqual(v.get('gaia_date'), date)
+
+ def test_missing_zip_file(self):
+ v = get_version(self.binary)
+ self.assertIsNone(v.get('gaia_changeset'))
+ self.assertIsNone(v.get('gaia_date'))
+
+ def test_missing_gaia_commit(self):
+ self._create_zip()
+ v = get_version(self.binary)
+ self.assertIsNone(v.get('gaia_changeset'))
+ self.assertIsNone(v.get('gaia_date'))
+
+ def test_b2g_fallback_when_no_binary(self):
+ self.assertRaises(errors.RemoteAppNotFoundError, get_version)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozversion/tests/test_binary.py b/testing/mozbase/mozversion/tests/test_binary.py
new file mode 100644
index 000000000..54665974f
--- /dev/null
+++ b/testing/mozbase/mozversion/tests/test_binary.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import tempfile
+import shutil
+import unittest
+
+import mozfile
+
+from mozversion import errors, get_version
+
+
+class BinaryTest(unittest.TestCase):
+ """test getting application version information from a binary path"""
+
+ application_ini = """[App]
+ID = AppID
+Name = AppName
+CodeName = AppCodeName
+Version = AppVersion
+BuildID = AppBuildID
+SourceRepository = AppSourceRepo
+SourceStamp = AppSourceStamp
+Vendor = AppVendor
+"""
+ platform_ini = """[Build]
+BuildID = PlatformBuildID
+Milestone = PlatformMilestone
+SourceStamp = PlatformSourceStamp
+SourceRepository = PlatformSourceRepo
+"""
+
+ def setUp(self):
+ self.cwd = os.getcwd()
+ self.tempdir = tempfile.mkdtemp()
+
+ self.binary = os.path.join(self.tempdir, 'binary')
+ with open(self.binary, 'w') as f:
+ f.write('foobar')
+
+ def tearDown(self):
+ os.chdir(self.cwd)
+ mozfile.remove(self.tempdir)
+
+ @unittest.skipIf(not os.environ.get('BROWSER_PATH'),
+ 'No binary has been specified.')
+ def test_real_binary(self):
+ v = get_version(os.environ.get('BROWSER_PATH'))
+ self.assertTrue(isinstance(v, dict))
+
+ def test_binary(self):
+ self._write_ini_files()
+
+ self._check_version(get_version(self.binary))
+
+ @unittest.skipIf(not hasattr(os, 'symlink'),
+ 'os.symlink not supported on this platform')
+ def test_symlinked_binary(self):
+ self._write_ini_files()
+
+ # create a symlink of the binary in another directory and check
+ # version against this symlink
+ tempdir = tempfile.mkdtemp()
+ try:
+ browser_link = os.path.join(tempdir,
+ os.path.basename(self.binary))
+ os.symlink(self.binary, browser_link)
+
+ self._check_version(get_version(browser_link))
+ finally:
+ mozfile.remove(tempdir)
+
+ def test_binary_in_current_path(self):
+ self._write_ini_files()
+
+ os.chdir(self.tempdir)
+ self._check_version(get_version())
+
+ def test_with_ini_files_on_osx(self):
+ self._write_ini_files()
+
+ platform = sys.platform
+ sys.platform = 'darwin'
+ try:
+ # get_version is working with ini files next to the binary
+ self._check_version(get_version(binary=self.binary))
+
+ # or if they are in the Resources dir
+ # in this case the binary must be in a Contents dir, next
+ # to the Resources dir
+ contents_dir = os.path.join(self.tempdir, 'Contents')
+ os.mkdir(contents_dir)
+ moved_binary = os.path.join(contents_dir,
+ os.path.basename(self.binary))
+ shutil.move(self.binary, moved_binary)
+
+ resources_dir = os.path.join(self.tempdir, 'Resources')
+ os.mkdir(resources_dir)
+ for ini_file in ('application.ini', 'platform.ini'):
+ shutil.move(os.path.join(self.tempdir, ini_file), resources_dir)
+
+ self._check_version(get_version(binary=moved_binary))
+ finally:
+ sys.platform = platform
+
+ def test_invalid_binary_path(self):
+ self.assertRaises(IOError, get_version,
+ os.path.join(self.tempdir, 'invalid'))
+
+ def test_without_ini_files(self):
+ """With missing ini files an exception should be thrown"""
+ self.assertRaises(errors.AppNotFoundError, get_version,
+ self.binary)
+
+ def test_without_platform_ini_file(self):
+ """With a missing platform.ini file an exception should be thrown"""
+ self._write_ini_files(platform=False)
+ self.assertRaises(errors.AppNotFoundError, get_version,
+ self.binary)
+
+ def test_without_application_ini_file(self):
+ """With a missing application.ini file an exception should be thrown"""
+ self._write_ini_files(application=False)
+ self.assertRaises(errors.AppNotFoundError, get_version,
+ self.binary)
+
+ def test_with_exe(self):
+ """Test that we can resolve .exe files"""
+ self._write_ini_files()
+
+ exe_name_unprefixed = self.binary + '1'
+ exe_name = exe_name_unprefixed + '.exe'
+ with open(exe_name, 'w') as f:
+ f.write('foobar')
+ self._check_version(get_version(exe_name_unprefixed))
+
+ def test_not_found_with_binary_specified(self):
+ self.assertRaises(errors.LocalAppNotFoundError, get_version, self.binary)
+
+ def _write_ini_files(self, application=True, platform=True):
+ if application:
+ with open(os.path.join(self.tempdir, 'application.ini'), 'w') as f:
+ f.writelines(self.application_ini)
+ if platform:
+ with open(os.path.join(self.tempdir, 'platform.ini'), 'w') as f:
+ f.writelines(self.platform_ini)
+
+ def _check_version(self, version):
+ self.assertEqual(version.get('application_id'), 'AppID')
+ self.assertEqual(version.get('application_name'), 'AppName')
+ self.assertEqual(
+ version.get('application_display_name'), 'AppCodeName')
+ self.assertEqual(version.get('application_version'), 'AppVersion')
+ self.assertEqual(version.get('application_buildid'), 'AppBuildID')
+ self.assertEqual(
+ version.get('application_repository'), 'AppSourceRepo')
+ self.assertEqual(
+ version.get('application_changeset'), 'AppSourceStamp')
+ self.assertEqual(version.get('application_vendor'), 'AppVendor')
+ self.assertIsNone(version.get('platform_name'))
+ self.assertEqual(version.get('platform_buildid'), 'PlatformBuildID')
+ self.assertEqual(
+ version.get('platform_repository'), 'PlatformSourceRepo')
+ self.assertEqual(
+ version.get('platform_changeset'), 'PlatformSourceStamp')
+ self.assertIsNone(version.get('invalid_key'))
+ self.assertEqual(
+ version.get('platform_version'), 'PlatformMilestone')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/mozversion/tests/test_sources.py b/testing/mozbase/mozversion/tests/test_sources.py
new file mode 100644
index 000000000..6c663edd6
--- /dev/null
+++ b/testing/mozbase/mozversion/tests/test_sources.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import tempfile
+import unittest
+
+import mozfile
+
+from mozversion import errors, get_version
+
+
+class SourcesTest(unittest.TestCase):
+ """test getting version information from a sources xml"""
+
+ application_ini = """[App]\nName = B2G\n"""
+ platform_ini = """[Build]
+BuildID = PlatformBuildID
+SourceStamp = PlatformSourceStamp
+SourceRepository = PlatformSourceRepo
+"""
+ sources_xml = """<?xml version="1.0" ?><manifest>
+ <project path="build" revision="build_revision" />
+ <project path="gaia" revision="gaia_revision" />
+ <project path="gecko" revision="gecko_revision" />
+</manifest>
+"""
+
+ def setUp(self):
+ self.cwd = os.getcwd()
+ self.tempdir = tempfile.mkdtemp()
+
+ self.binary = os.path.join(self.tempdir, 'binary')
+ with open(self.binary, 'w') as f:
+ f.write('foobar')
+
+ def tearDown(self):
+ os.chdir(self.cwd)
+ mozfile.remove(self.tempdir)
+
+ def _write_conf_files(self, sources=True):
+ with open(os.path.join(self.tempdir, 'application.ini'), 'w') as f:
+ f.writelines(self.application_ini)
+ with open(os.path.join(self.tempdir, 'platform.ini'), 'w') as f:
+ f.writelines(self.platform_ini)
+ if sources:
+ with open(os.path.join(self.tempdir, 'sources.xml'), 'w') as f:
+ f.writelines(self.sources_xml)
+
+ def test_sources(self):
+ self._write_conf_files()
+
+ os.chdir(self.tempdir)
+ self._check_version(get_version(sources=os.path.join(self.tempdir,
+ 'sources.xml')))
+
+ def test_sources_in_current_directory(self):
+ self._write_conf_files()
+
+ os.chdir(self.tempdir)
+ self._check_version(get_version())
+
+ def test_invalid_sources_path(self):
+ """An invalid source path should cause an exception"""
+ self.assertRaises(errors.AppNotFoundError, get_version,
+ self.binary, os.path.join(self.tempdir, 'invalid'))
+
+ def test_without_sources_file(self):
+ """With a missing sources file no exception should be thrown"""
+ self._write_conf_files(sources=False)
+
+ get_version(self.binary)
+
+ def _check_version(self, version):
+ self.assertEqual(version.get('build_changeset'), 'build_revision')
+ self.assertEqual(version.get('gaia_changeset'), 'gaia_revision')
+ self.assertEqual(version.get('gecko_changeset'), 'gecko_revision')
+ self.assertIsNone(version.get('invalid_key'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/mozbase/packages.txt b/testing/mozbase/packages.txt
new file mode 100644
index 000000000..ba1f292ef
--- /dev/null
+++ b/testing/mozbase/packages.txt
@@ -0,0 +1,19 @@
+manifestparser.pth:testing/mozbase/manifestparser
+mozb2g.pth:testing/mozbase/mozb2g
+mozcrash.pth:testing/mozbase/mozcrash
+mozdebug.pth:testing/mozbase/mozdebug
+mozdevice.pth:testing/mozbase/mozdevice
+mozfile.pth:testing/mozbase/mozfile
+mozhttpd.pth:testing/mozbase/mozhttpd
+mozinfo.pth:testing/mozbase/mozinfo
+mozinstall.pth:testing/mozbase/mozinstall
+mozleak.pth:testing/mozbase/mozleak
+mozlog.pth:testing/mozbase/mozlog
+moznetwork.pth:testing/mozbase/moznetwork
+mozprocess.pth:testing/mozbase/mozprocess
+mozprofile.pth:testing/mozbase/mozprofile
+mozrunner.pth:testing/mozbase/mozrunner
+mozsystemmonitor.pth:testing/mozbase/mozsystemmonitor
+mozscreenshot.pth:testing/mozbase/mozscreenshot
+moztest.pth:testing/mozbase/moztest
+mozversion.pth:testing/mozbase/mozversion
diff --git a/testing/mozbase/setup_development.py b/testing/mozbase/setup_development.py
new file mode 100755
index 000000000..c048d504f
--- /dev/null
+++ b/testing/mozbase/setup_development.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Set up mozbase packages for development.
+
+Packages may be specified as command line arguments.
+If no arguments are given, install all packages.
+
+See https://wiki.mozilla.org/Auto-tools/Projects/Mozbase
+"""
+
+import os
+import subprocess
+import sys
+from optparse import OptionParser
+from subprocess import PIPE
+try:
+ from subprocess import check_call as call
+except ImportError:
+ from subprocess import call
+
+
+# directory containing this file
+here = os.path.dirname(os.path.abspath(__file__))
+
+# all python packages
+mozbase_packages = [i for i in os.listdir(here)
+ if os.path.exists(os.path.join(here, i, 'setup.py'))]
+
+# testing: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase#Tests
+test_packages = ["mock"]
+
+# documentation: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase#Documentation
+extra_packages = ["sphinx"]
+
+
+def cycle_check(order, dependencies):
+ """ensure no cyclic dependencies"""
+ order_dict = dict([(j, i) for i, j in enumerate(order)])
+ for package, deps in dependencies.items():
+ index = order_dict[package]
+ for d in deps:
+ assert index > order_dict[d], "Cyclic dependencies detected"
+
+
+def info(directory):
+ "get the package setup.py information"
+
+ assert os.path.exists(os.path.join(directory, 'setup.py'))
+
+ # setup the egg info
+ try:
+ call([sys.executable, 'setup.py', 'egg_info'],
+ cwd=directory, stdout=PIPE)
+ except subprocess.CalledProcessError:
+ print "Error running setup.py in %s" % directory
+ raise
+
+ # get the .egg-info directory
+ egg_info = [entry for entry in os.listdir(directory)
+ if entry.endswith('.egg-info')]
+ assert len(egg_info) == 1, 'Expected one .egg-info directory in %s, got: %s' % (directory,
+ egg_info)
+ egg_info = os.path.join(directory, egg_info[0])
+ assert os.path.isdir(egg_info), "%s is not a directory" % egg_info
+
+ # read the package information
+ pkg_info = os.path.join(egg_info, 'PKG-INFO')
+ info_dict = {}
+ for line in file(pkg_info).readlines():
+ if not line or line[0].isspace():
+ continue # XXX neglects description
+ assert ':' in line
+ key, value = [i.strip() for i in line.split(':', 1)]
+ info_dict[key] = value
+
+ return info_dict
+
+
+def get_dependencies(directory):
+ "returns the package name and dependencies given a package directory"
+
+ # get the package metadata
+ info_dict = info(directory)
+
+ # get the .egg-info directory
+ egg_info = [entry for entry in os.listdir(directory)
+ if entry.endswith('.egg-info')][0]
+
+ # read the dependencies
+ requires = os.path.join(directory, egg_info, 'requires.txt')
+ dependencies = []
+ if os.path.exists(requires):
+ for line in file(requires):
+ line = line.strip()
+ # In the requires.txt file, a dependency is a non-empty line.
+ # Lines like [device] mark sections of optional dependencies;
+ # we don't want those section headers.
+ if line and not (line.startswith('[') and line.endswith(']')):
+ dependencies.append(line)
+
+ # return the information
+ return info_dict['Name'], dependencies
+
+
+def dependency_info(dep):
+ "return dictionary of dependency information from a dependency string"
+ retval = dict(Name=None, Type=None, Version=None)
+ for joiner in ('==', '<=', '>='):
+ if joiner in dep:
+ retval['Type'] = joiner
+ name, version = [i.strip() for i in dep.split(joiner, 1)]
+ retval['Name'] = name
+ retval['Version'] = version
+ break
+ else:
+ retval['Name'] = dep.strip()
+ return retval
+
+
+def unroll_dependencies(dependencies):
+ """
+ unroll a set of dependencies to a flat list
+
+ dependencies = {'packageA': set(['packageB', 'packageC', 'packageF']),
+ 'packageB': set(['packageC', 'packageD', 'packageE', 'packageG']),
+ 'packageC': set(['packageE']),
+ 'packageE': set(['packageF', 'packageG']),
+ 'packageF': set(['packageG']),
+ 'packageX': set(['packageA', 'packageG'])}
+ """
+
+ order = []
+
+ # flatten all
+ packages = set(dependencies.keys())
+ for deps in dependencies.values():
+ packages.update(deps)
+
+ while len(order) != len(packages):
+
+ for package in packages.difference(order):
+ if set(dependencies.get(package, set())).issubset(order):
+ order.append(package)
+ break
+ else:
+ raise AssertionError("Cyclic dependencies detected")
+
+ cycle_check(order, dependencies) # sanity check
+
+ return order
+
+
+def main(args=sys.argv[1:]):
+
+ # parse command line options
+ usage = '%prog [options] [package] [package] [...]'
+ parser = OptionParser(usage=usage, description=__doc__)
+ parser.add_option('-d', '--dependencies', dest='list_dependencies',
+ action='store_true', default=False,
+ help="list dependencies for the packages")
+ parser.add_option('--list', action='store_true', default=False,
+ help="list what will be installed")
+ parser.add_option('--extra', '--install-extra-packages', action='store_true', default=False,
+ help="installs extra supporting packages as well as core mozbase ones")
+ options, packages = parser.parse_args(args)
+
+ if not packages:
+ # install all packages
+ packages = sorted(mozbase_packages)
+
+ # ensure specified packages are in the list
+ assert set(packages).issubset(mozbase_packages), \
+ "Packages should be in %s (You gave: %s)" % (mozbase_packages, packages)
+
+ if options.list_dependencies:
+ # list the package dependencies
+ for package in packages:
+ print '%s: %s' % get_dependencies(os.path.join(here, package))
+ parser.exit()
+
+ # gather dependencies
+ # TODO: version conflict checking
+ deps = {}
+ alldeps = {}
+ mapping = {} # mapping from subdir name to package name
+ # core dependencies
+ for package in packages:
+ key, value = get_dependencies(os.path.join(here, package))
+ deps[key] = [dependency_info(dep)['Name'] for dep in value]
+ mapping[package] = key
+
+ # keep track of all dependencies for non-mozbase packages
+ for dep in value:
+ alldeps[dependency_info(dep)['Name']] = ''.join(dep.split())
+
+ # indirect dependencies
+ flag = True
+ while flag:
+ flag = False
+ for value in deps.values():
+ for dep in value:
+ if dep in mozbase_packages and dep not in deps:
+ key, value = get_dependencies(os.path.join(here, dep))
+ deps[key] = [dep for dep in value]
+
+ for dep in value:
+ alldeps[dep] = ''.join(dep.split())
+ mapping[package] = key
+ flag = True
+ break
+ if flag:
+ break
+
+ # get the remaining names for the mapping
+ for package in mozbase_packages:
+ if package in mapping:
+ continue
+ key, value = get_dependencies(os.path.join(here, package))
+ mapping[package] = key
+
+ # unroll dependencies
+ unrolled = unroll_dependencies(deps)
+
+ # make a reverse mapping: package name -> subdirectory
+ reverse_mapping = dict([(j, i) for i, j in mapping.items()])
+
+ # we only care about dependencies in mozbase
+ unrolled = [package for package in unrolled if package in reverse_mapping]
+
+ if options.list:
+ # list what will be installed
+ for package in unrolled:
+ print package
+ parser.exit()
+
+ # set up the packages for development
+ for package in unrolled:
+ call([sys.executable, 'setup.py', 'develop', '--no-deps'],
+ cwd=os.path.join(here, reverse_mapping[package]))
+
+ # prepend the directory of sys.executable to PATH to help ensure the
+ # correct `easy_install` is called
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=893878
+ os.environ['PATH'] = '%s%s%s' % (os.path.dirname(os.path.abspath(sys.executable)),
+ os.path.pathsep,
+ os.environ.get('PATH', '').strip(os.path.pathsep))
+
+ # install non-mozbase dependencies
+ # these need to be installed separately and the --no-deps flag
+ # subsequently used due to a bug in setuptools; see
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=759836
+ pypi_deps = dict([(i, j) for i, j in alldeps.items()
+ if i not in unrolled])
+ for package, version in pypi_deps.items():
+ # easy_install should be available since we rely on setuptools
+ call(['easy_install', version])
+
+ # install packages required for unit testing
+ for package in test_packages:
+ call(['easy_install', package])
+
+ # install extra non-mozbase packages if desired
+ if options.extra:
+ for package in extra_packages:
+ call(['easy_install', package])
+
+if __name__ == '__main__':
+ main()
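As an illustration of the dependency helpers above (not part of the patch), a small sketch; it assumes it is run from the testing/mozbase directory so that setup_development is importable, and the package names are only examples.

    import setup_development

    # Split a requirement string into its parts.
    dep = setup_development.dependency_info('mozfile >= 1.0')
    # -> {'Name': 'mozfile', 'Type': '>=', 'Version': '1.0'}

    # Flatten a dependency graph so that every package appears
    # after the packages it depends on.
    order = setup_development.unroll_dependencies({
        'mozlog': set(['mozfile']),
        'mozversion': set(['mozfile', 'mozlog']),
    })
    # -> ['mozfile', 'mozlog', 'mozversion']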
diff --git a/testing/mozbase/test-manifest.ini b/testing/mozbase/test-manifest.ini
new file mode 100644
index 000000000..e4a5f1971
--- /dev/null
+++ b/testing/mozbase/test-manifest.ini
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# mozbase test manifest, in the format of
+# http://mozbase.readthedocs.org/en/latest/manifestparser.html
+
+# run with
+# https://github.com/mozilla/mozbase/blob/master/test.py
+
+[include:manifestparser/tests/manifest.ini]
+[include:mozcrash/tests/manifest.ini]
+[include:mozdevice/tests/manifest.ini]
+[include:mozfile/tests/manifest.ini]
+[include:mozhttpd/tests/manifest.ini]
+[include:mozinfo/tests/manifest.ini]
+[include:mozinstall/tests/manifest.ini]
+[include:mozlog/tests/manifest.ini]
+[include:moznetwork/tests/manifest.ini]
+[include:mozprocess/tests/manifest.ini]
+[include:mozprofile/tests/manifest.ini]
+[include:mozrunner/tests/manifest.ini]
+[include:moztest/tests/manifest.ini]
+[include:mozversion/tests/manifest.ini]
diff --git a/testing/mozbase/test.py b/testing/mozbase/test.py
new file mode 100755
index 000000000..013506fc6
--- /dev/null
+++ b/testing/mozbase/test.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+run mozbase tests from a manifest,
+by default https://github.com/mozilla/mozbase/blob/master/test-manifest.ini
+"""
+
+import imp
+import manifestparser
+import mozinfo
+import optparse
+import os
+import sys
+import unittest
+
+import mozlog
+from moztest.results import TestResultCollection
+from moztest.adapters.unit import StructuredTestRunner
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+def unittests(path):
+ """return the unittests in a .py file"""
+
+ path = os.path.abspath(path)
+ unittests = []
+ assert os.path.exists(path)
+ directory = os.path.dirname(path)
+ sys.path.insert(0, directory) # insert directory into path for top-level imports
+ modname = os.path.splitext(os.path.basename(path))[0]
+ module = imp.load_source(modname, path)
+ sys.path.pop(0) # remove directory from global path
+ loader = unittest.TestLoader()
+ suite = loader.loadTestsFromModule(module)
+ for test in suite:
+ unittests.append(test)
+ return unittests
+
+
+def main(args=sys.argv[1:]):
+
+ # parse command line options
+ usage = '%prog [options] manifest.ini <manifest.ini> <...>'
+ parser = optparse.OptionParser(usage=usage, description=__doc__)
+ parser.add_option('-b', "--binary",
+ dest="binary", help="Binary path",
+ metavar=None, default=None)
+ parser.add_option('--list', dest='list_tests',
+ action='store_true', default=False,
+ help="list paths of tests to be run")
+ mozlog.commandline.add_logging_group(parser)
+ options, args = parser.parse_args(args)
+ logger = mozlog.commandline.setup_logging("mozbase", options,
+ {"tbpl": sys.stdout})
+
+ # read the manifest
+ if args:
+ manifests = args
+ else:
+ manifests = [os.path.join(here, 'test-manifest.ini')]
+ missing = []
+ for manifest in manifests:
+ # ensure manifests exist
+ if not os.path.exists(manifest):
+ missing.append(manifest)
+ assert not missing, 'manifest(s) not found: %s' % ', '.join(missing)
+ manifest = manifestparser.TestManifest(manifests=manifests)
+
+ if options.binary:
+ # A specified binary should override the environment variable
+ os.environ['BROWSER_PATH'] = options.binary
+
+ # gather the tests
+ tests = manifest.active_tests(disabled=False, **mozinfo.info)
+ tests = [test['path'] for test in tests]
+ logger.suite_start(tests)
+
+ if options.list_tests:
+ # print test paths
+ print '\n'.join(tests)
+ sys.exit(0)
+
+ # create unittests
+ unittestlist = []
+ for test in tests:
+ unittestlist.extend(unittests(test))
+
+ # run the tests
+ suite = unittest.TestSuite(unittestlist)
+ runner = StructuredTestRunner(logger=logger)
+ unittest_results = runner.run(suite)
+ results = TestResultCollection.from_unittest_results(None, unittest_results)
+ logger.suite_end()
+
+ # exit according to results
+ sys.exit(1 if results.num_failures else 0)
+
+if __name__ == '__main__':
+ main()
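For context on how test.py selects tests (not part of the patch), a short sketch using the test-manifest.ini added in this patch; it assumes manifestparser and mozinfo are installed and that it is run from the testing/mozbase directory.

    import manifestparser
    import mozinfo

    # active_tests() drops tests whose skip-if/run-if conditions do not
    # match the current platform described by mozinfo.info.
    manifest = manifestparser.TestManifest(manifests=['test-manifest.ini'])
    tests = manifest.active_tests(disabled=False, **mozinfo.info)
    print('\n'.join(test['path'] for test in tests))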
diff --git a/testing/mozbase/versioninfo.py b/testing/mozbase/versioninfo.py
new file mode 100755
index 000000000..82da432a4
--- /dev/null
+++ b/testing/mozbase/versioninfo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+List mozbase package dependencies or generate changelogs
+from commit messages.
+"""
+
+from collections import Iterable
+from distutils.version import StrictVersion
+import argparse
+import os
+import subprocess
+import sys
+
+import setup_development
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def run_hg(command):
+ command = command[:]
+ if not isinstance(command, Iterable):
+ command = command.split()
+ command.insert(0, 'hg')
+ try:
+ output = subprocess.check_output(command, cwd=here)
+ except subprocess.CalledProcessError:
+ sys.exit(1)
+ return output
+
+
+def changelog(args):
+ setup = os.path.join(args.module, 'setup.py')
+
+ def get_version_rev(v=None):
+ revisions = run_hg(['log', setup, '--template={rev},']).split(',')[:-1]
+ for rev in revisions:
+ diff = run_hg(['diff', '-c', rev, setup, '-U0'])
+ minus_version = None
+ plus_version = None
+ for line in diff.splitlines():
+ if line.startswith('-PACKAGE_VERSION'):
+ try:
+ minus_version = StrictVersion(line.split()[-1].strip('"\''))
+ except ValueError:
+ pass
+ elif line.startswith('+PACKAGE_VERSION'):
+ try:
+ plus_version = StrictVersion(line.split()[-1].strip('"\''))
+ except ValueError:
+ break
+
+ # make sure the change isn't a backout
+ if not minus_version or plus_version > minus_version:
+ if not v:
+ return rev
+
+ if StrictVersion(v) == plus_version:
+ return rev
+
+ print("Could not find %s revision for version %s." % (args.module, v or 'latest'))
+ sys.exit(1)
+
+ from_ref = args.from_ref or get_version_rev()
+ to_ref = args.to_ref or 'tip'
+
+ if '.' in from_ref:
+ from_ref = get_version_rev(from_ref)
+ if '.' in to_ref:
+ to_ref = get_version_rev(to_ref)
+
+ delim = '\x12\x59\x52\x99\x05'
+ changelog = run_hg(['log', '-r', '%s:children(%s)' % (to_ref, from_ref),
+ '--template={desc}%s' % delim,
+ '-M', args.module]).split(delim)[:-1]
+
+ def prettify(desc):
+ lines = desc.splitlines()
+ lines = [('* %s' if i == 0 else ' %s') % l for i, l in enumerate(lines)]
+ return '\n'.join(lines)
+
+ changelog = map(prettify, changelog)
+ print '\n'.join(changelog)
+
+
+def dependencies(args):
+ # get package information
+ info = {}
+ dependencies = {}
+ for package in setup_development.mozbase_packages:
+ directory = os.path.join(setup_development.here, package)
+ info[directory] = setup_development.info(directory)
+ name, _dependencies = setup_development.get_dependencies(directory)
+ assert name == info[directory]['Name']
+ dependencies[name] = _dependencies
+
+ # print package version information
+ for value in info.values():
+ print '%s %s : %s' % (value['Name'], value['Version'],
+ ', '.join(dependencies[value['Name']]))
+
+
+def main(args=sys.argv[1:]):
+ parser = argparse.ArgumentParser()
+ subcommands = parser.add_subparsers(help="Sub-commands")
+
+ p_deps = subcommands.add_parser('dependencies', help="Print dependencies.")
+ p_deps.set_defaults(func=dependencies)
+
+ p_changelog = subcommands.add_parser('changelog', help="Print a changelog.")
+ p_changelog.add_argument('module', help="Module to get changelog from.")
+ p_changelog.add_argument('--from', dest='from_ref', default=None,
+ help="Starting version or revision to list "
+ "changes from. [defaults to latest version]")
+ p_changelog.add_argument('--to', dest='to_ref', default=None,
+ help="Ending version or revision to list "
+ "changes to. [defaults to tip]")
+ p_changelog.set_defaults(func=changelog)
+
+ # default to showing dependencies
+ if args == []:
+ args.append('dependencies')
+ args = parser.parse_args(args)
+ args.func(args)
+
+
+if __name__ == '__main__':
+ main()
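A brief sketch of the version-bump check performed by get_version_rev above (not part of the patch): a StrictVersion comparison is what distinguishes a real version bump from a backout in the setup.py diff. The version numbers below are illustrative.

    from distutils.version import StrictVersion

    minus_version = StrictVersion('1.3')  # version removed by the changeset
    plus_version = StrictVersion('1.4')   # version added by the changeset

    # Only changesets that raise the version are treated as releases.
    print(plus_version > minus_version)  # True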